diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md index 60a85f5bda76..e2c7684444e8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md +++ b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md @@ -1,6 +1,8 @@ # Release History -## 1.0.0-beta.30 (Unreleased) +## 1.0.0-beta.30 (2024-07-22) + +- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt). ### Features Added diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/README.md b/sdk/datafactory/azure-resourcemanager-datafactory/README.md index 94f4838fce87..afff5f187a48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/README.md +++ b/sdk/datafactory/azure-resourcemanager-datafactory/README.md @@ -32,7 +32,7 @@ Various documentation is available to help you get started com.azure.resourcemanager azure-resourcemanager-datafactory - 1.0.0-beta.29 + 1.0.0-beta.30 ``` [//]: # ({x-version-update-end}) @@ -45,15 +45,11 @@ Azure Management Libraries require a `TokenCredential` implementation for authen ### Authentication -By default, Microsoft Entra ID token authentication depends on correct configuration of the following environment variables. +Microsoft Entra ID token authentication relies on the [credential class][azure_identity_credentials] from [Azure Identity][azure_identity] package. -- `AZURE_CLIENT_ID` for Azure client ID. -- `AZURE_TENANT_ID` for Azure tenant ID. -- `AZURE_CLIENT_SECRET` or `AZURE_CLIENT_CERTIFICATE_PATH` for client secret or client certificate. 
+Azure subscription ID can be configured via `AZURE_SUBSCRIPTION_ID` environment variable. -In addition, Azure subscription ID can be configured via `AZURE_SUBSCRIPTION_ID` environment variable. - -With above configuration, `azure` client can be authenticated using the following code: +Assuming the use of the `DefaultAzureCredential` credential class, the client can be authenticated using the following code: ```java AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); @@ -184,6 +180,7 @@ This project has adopted the [Microsoft Open Source Code of Conduct][coc]. For m [jdk]: https://learn.microsoft.com/azure/developer/java/fundamentals/ [azure_subscription]: https://azure.microsoft.com/free/ [azure_identity]: https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/identity/azure-identity +[azure_identity_credentials]: https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/identity/azure-identity#credentials [azure_core_http_netty]: https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/core/azure-core-http-netty [authenticate]: https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/resourcemanager/docs/AUTH.md [design]: https://github.com/Azure/azure-sdk-for-java/blob/main/sdk/resourcemanager/docs/DESIGN.md diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml index b0b6ee142029..855358dd3926 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml +++ b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml @@ -48,6 +48,11 @@ true + + com.azure + azure-json + 1.1.0 + com.azure azure-core diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java index 9b1f74961b2a..c2afc71e4881 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java @@ -294,7 +294,7 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile .append("-") .append("com.azure.resourcemanager.datafactory") .append("/") - .append("1.0.0-beta.29"); + .append("1.0.0-beta.30"); if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) { userAgentBuilder.append(" (") .append(Configuration.getGlobalConfiguration().get("java.version")) diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AccessPolicyResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AccessPolicyResponseInner.java index 9e7b7a1b5f2f..906f91807fe1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AccessPolicyResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AccessPolicyResponseInner.java @@ -5,30 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.UserAccessPolicy; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Get Data Plane read only token response definition. */ @Fluent -public final class AccessPolicyResponseInner { +public final class AccessPolicyResponseInner implements JsonSerializable<AccessPolicyResponseInner> { /* * The user access policy. 
*/ - @JsonProperty(value = "policy") private UserAccessPolicy policy; /* * Data Plane read only access token. */ - @JsonProperty(value = "accessToken") private String accessToken; /* * Data Plane service base URL. */ - @JsonProperty(value = "dataPlaneUrl") private String dataPlaneUrl; /** @@ -107,4 +108,46 @@ public void validate() { policy().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("policy", this.policy); + jsonWriter.writeStringField("accessToken", this.accessToken); + jsonWriter.writeStringField("dataPlaneUrl", this.dataPlaneUrl); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AccessPolicyResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AccessPolicyResponseInner if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AccessPolicyResponseInner. 
+ */ + public static AccessPolicyResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AccessPolicyResponseInner deserializedAccessPolicyResponseInner = new AccessPolicyResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("policy".equals(fieldName)) { + deserializedAccessPolicyResponseInner.policy = UserAccessPolicy.fromJson(reader); + } else if ("accessToken".equals(fieldName)) { + deserializedAccessPolicyResponseInner.accessToken = reader.getString(); + } else if ("dataPlaneUrl".equals(fieldName)) { + deserializedAccessPolicyResponseInner.dataPlaneUrl = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAccessPolicyResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java index 9141559dd3f1..9c2f9aeb5e4d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ActivityRunsQueryResponseInner.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ActivityRun; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list activity runs. 
*/ @Fluent -public final class ActivityRunsQueryResponseInner { +public final class ActivityRunsQueryResponseInner implements JsonSerializable<ActivityRunsQueryResponseInner> { /* * List of activity runs. */ - @JsonProperty(value = "value", required = true) private List<ActivityRun> value; /* * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ - @JsonProperty(value = "continuationToken") private String continuationToken; /** @@ -91,4 +93,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ActivityRunsQueryResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ActivityRunsQueryResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ActivityRunsQueryResponseInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ActivityRunsQueryResponseInner. 
+ */ + public static ActivityRunsQueryResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ActivityRunsQueryResponseInner deserializedActivityRunsQueryResponseInner + = new ActivityRunsQueryResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List<ActivityRun> value = reader.readArray(reader1 -> ActivityRun.fromJson(reader1)); + deserializedActivityRunsQueryResponseInner.value = value; + } else if ("continuationToken".equals(fieldName)) { + deserializedActivityRunsQueryResponseInner.continuationToken = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedActivityRunsQueryResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AddDataFlowToDebugSessionResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AddDataFlowToDebugSessionResponseInner.java index 3fe2c1ff3b9b..1a2565770b8b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AddDataFlowToDebugSessionResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AddDataFlowToDebugSessionResponseInner.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Response body structure for starting data flow debug session. 
*/ @Fluent -public final class AddDataFlowToDebugSessionResponseInner { +public final class AddDataFlowToDebugSessionResponseInner + implements JsonSerializable { /* * The ID of data flow debug job version. */ - @JsonProperty(value = "jobVersion") private String jobVersion; /** @@ -51,4 +55,41 @@ public AddDataFlowToDebugSessionResponseInner withJobVersion(String jobVersion) */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("jobVersion", this.jobVersion); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AddDataFlowToDebugSessionResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AddDataFlowToDebugSessionResponseInner if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AddDataFlowToDebugSessionResponseInner. 
+ */ + public static AddDataFlowToDebugSessionResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AddDataFlowToDebugSessionResponseInner deserializedAddDataFlowToDebugSessionResponseInner + = new AddDataFlowToDebugSessionResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("jobVersion".equals(fieldName)) { + deserializedAddDataFlowToDebugSessionResponseInner.jobVersion = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAddDataFlowToDebugSessionResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java index 1c501ee290f7..0c15d4ff1fb9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonMwsLinkedServiceTypeProperties.java @@ -6,75 +6,70 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon Marketplace Web Service linked service properties. 
*/ @Fluent -public final class AmazonMwsLinkedServiceTypeProperties { +public final class AmazonMwsLinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, * separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */ - @JsonProperty(value = "marketplaceID", required = true) private Object marketplaceId; /* * The Amazon seller ID. */ - @JsonProperty(value = "sellerID", required = true) private Object sellerId; /* * The Amazon MWS authentication token. */ - @JsonProperty(value = "mwsAuthToken") private SecretBase mwsAuthToken; /* * The access key id used to access data. */ - @JsonProperty(value = "accessKeyId", required = true) private Object accessKeyId; /* * The secret key used to access data. */ - @JsonProperty(value = "secretKey") private SecretBase secretKey; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -328,4 +323,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonMwsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("marketplaceID", this.marketplaceId); + jsonWriter.writeUntypedField("sellerID", this.sellerId); + jsonWriter.writeUntypedField("accessKeyId", this.accessKeyId); + jsonWriter.writeJsonField("mwsAuthToken", this.mwsAuthToken); + jsonWriter.writeJsonField("secretKey", this.secretKey); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonMwsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonMwsLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonMwsLinkedServiceTypeProperties. 
+ */ + public static AmazonMwsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonMwsLinkedServiceTypeProperties deserializedAmazonMwsLinkedServiceTypeProperties + = new AmazonMwsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("marketplaceID".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.marketplaceId = reader.readUntyped(); + } else if ("sellerID".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.sellerId = reader.readUntyped(); + } else if ("accessKeyId".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.accessKeyId = reader.readUntyped(); + } else if ("mwsAuthToken".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.mwsAuthToken = SecretBase.fromJson(reader); + } else if ("secretKey".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.secretKey = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonMwsLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonMwsLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java index 8a913b44b1e0..b0682f3245e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * AmazonRdsForOracle database linked service properties. */ @Fluent -public final class AmazonRdsForLinkedServiceTypeProperties { +public final class AmazonRdsForLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -120,4 +122,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonRdsForLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRdsForLinkedServiceTypeProperties. 
+ */ + public static AmazonRdsForLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForLinkedServiceTypeProperties deserializedAmazonRdsForLinkedServiceTypeProperties + = new AmazonRdsForLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAmazonRdsForLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAmazonRdsForLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonRdsForLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRdsForLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForOracleTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForOracleTableDatasetTypeProperties.java index 1167f3f25fb3..ec433462da5c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForOracleTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForOracleTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; 
+import com.azure.json.JsonWriter; +import java.io.IOException; /** * AmazonRdsForOracle dataset properties. */ @Fluent -public final class AmazonRdsForOracleTableDatasetTypeProperties { +public final class AmazonRdsForOracleTableDatasetTypeProperties + implements JsonSerializable { /* * The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +84,44 @@ public AmazonRdsForOracleTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForOracleTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForOracleTableDatasetTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForOracleTableDatasetTypeProperties. 
+ */ + public static AmazonRdsForOracleTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForOracleTableDatasetTypeProperties deserializedAmazonRdsForOracleTableDatasetTypeProperties + = new AmazonRdsForOracleTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRdsForOracleTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java index 97c8017999b4..3f69aa1cc079 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerLinkedServiceTypeProperties.java @@ -5,11 +5,14 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; import 
com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedProperties; import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon Rds for SQL Server linked service properties. @@ -19,38 +22,32 @@ public final class AmazonRdsForSqlServerLinkedServiceTypeProperties extends SqlS /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private AmazonRdsForSqlAuthenticationType authenticationType; /* * The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The on-premises Windows authentication password. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Sql always encrypted properties. 
*/ - @JsonProperty(value = "alwaysEncryptedSettings") private SqlAlwaysEncryptedProperties alwaysEncryptedSettings; /** @@ -374,4 +371,130 @@ public void validate() { alwaysEncryptedSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", server()); + jsonWriter.writeUntypedField("database", database()); + jsonWriter.writeUntypedField("encrypt", encrypt()); + jsonWriter.writeUntypedField("trustServerCertificate", trustServerCertificate()); + jsonWriter.writeUntypedField("hostNameInCertificate", hostnameInCertificate()); + jsonWriter.writeUntypedField("applicationIntent", applicationIntent()); + jsonWriter.writeUntypedField("connectTimeout", connectTimeout()); + jsonWriter.writeUntypedField("connectRetryCount", connectRetryCount()); + jsonWriter.writeUntypedField("connectRetryInterval", connectRetryInterval()); + jsonWriter.writeUntypedField("loadBalanceTimeout", loadBalanceTimeout()); + jsonWriter.writeUntypedField("commandTimeout", commandTimeout()); + jsonWriter.writeUntypedField("integratedSecurity", integratedSecurity()); + jsonWriter.writeUntypedField("failoverPartner", failoverPartner()); + jsonWriter.writeUntypedField("maxPoolSize", maxPoolSize()); + jsonWriter.writeUntypedField("minPoolSize", minPoolSize()); + jsonWriter.writeUntypedField("multipleActiveResultSets", multipleActiveResultSets()); + jsonWriter.writeUntypedField("multiSubnetFailover", multiSubnetFailover()); + jsonWriter.writeUntypedField("packetSize", packetSize()); + jsonWriter.writeUntypedField("pooling", pooling()); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("alwaysEncryptedSettings", this.alwaysEncryptedSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForSqlServerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForSqlServerLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForSqlServerLinkedServiceTypeProperties. + */ + public static AmazonRdsForSqlServerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForSqlServerLinkedServiceTypeProperties deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + = new AmazonRdsForSqlServerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withServer(reader.readUntyped()); + } else if ("database".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withDatabase(reader.readUntyped()); + } else if ("encrypt".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withEncrypt(reader.readUntyped()); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withTrustServerCertificate(reader.readUntyped()); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + 
.withHostnameInCertificate(reader.readUntyped()); + } else if ("applicationIntent".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withApplicationIntent(reader.readUntyped()); + } else if ("connectTimeout".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withConnectTimeout(reader.readUntyped()); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withConnectRetryCount(reader.readUntyped()); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withConnectRetryInterval(reader.readUntyped()); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withLoadBalanceTimeout(reader.readUntyped()); + } else if ("commandTimeout".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withCommandTimeout(reader.readUntyped()); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withIntegratedSecurity(reader.readUntyped()); + } else if ("failoverPartner".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withFailoverPartner(reader.readUntyped()); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withMaxPoolSize(reader.readUntyped()); + } else if ("minPoolSize".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withMinPoolSize(reader.readUntyped()); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + .withMultipleActiveResultSets(reader.readUntyped()); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties + 
.withMultiSubnetFailover(reader.readUntyped()); + } else if ("packetSize".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withPacketSize(reader.readUntyped()); + } else if ("pooling".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.withPooling(reader.readUntyped()); + } else if ("connectionString".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.connectionString + = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.authenticationType + = AmazonRdsForSqlAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else if ("alwaysEncryptedSettings".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties.alwaysEncryptedSettings + = SqlAlwaysEncryptedProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRdsForSqlServerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerTableDatasetTypeProperties.java index cafa3a775214..2d29e3d41cec 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForSqlServerTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The Amazon RDS for SQL Server dataset properties. */ @Fluent -public final class AmazonRdsForSqlServerTableDatasetTypeProperties { +public final class AmazonRdsForSqlServerTableDatasetTypeProperties + implements JsonSerializable { /* * The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +84,44 @@ public AmazonRdsForSqlServerTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForSqlServerTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AmazonRdsForSqlServerTableDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForSqlServerTableDatasetTypeProperties. + */ + public static AmazonRdsForSqlServerTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForSqlServerTableDatasetTypeProperties deserializedAmazonRdsForSqlServerTableDatasetTypeProperties + = new AmazonRdsForSqlServerTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRdsForSqlServerTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java index 6ff0be04434a..575bf5e7c626 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftLinkedServiceTypeProperties.java @@ -6,50 +6,49 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; 
+import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon Redshift linked service properties. */ @Fluent -public final class AmazonRedshiftLinkedServiceTypeProperties { +public final class AmazonRedshiftLinkedServiceTypeProperties + implements JsonSerializable { /* * The name of the Amazon Redshift server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server", required = true) private Object server; /* * The username of the Amazon Redshift source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * The password of the Amazon Redshift source. */ - @JsonProperty(value = "password") private SecretBase password; /* * The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database", required = true) private Object database; /* * The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is * 5439. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "port") private Object port; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -210,4 +209,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonRedshiftLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRedshiftLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRedshiftLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRedshiftLinkedServiceTypeProperties. 
+ */ + public static AmazonRedshiftLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRedshiftLinkedServiceTypeProperties deserializedAmazonRedshiftLinkedServiceTypeProperties + = new AmazonRedshiftLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("port".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonRedshiftLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRedshiftLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftTableDatasetTypeProperties.java index 11b24566e84b..808fdc742dc0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftTableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRedshiftTableDatasetTypeProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Amazon Redshift table dataset properties. */ @Fluent -public final class AmazonRedshiftTableDatasetTypeProperties { +public final class AmazonRedshiftTableDatasetTypeProperties + implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The Amazon Redshift table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The Amazon Redshift schema name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +107,47 @@ public AmazonRedshiftTableDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRedshiftTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRedshiftTableDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the AmazonRedshiftTableDatasetTypeProperties. + */ + public static AmazonRedshiftTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRedshiftTableDatasetTypeProperties deserializedAmazonRedshiftTableDatasetTypeProperties + = new AmazonRedshiftTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAmazonRedshiftTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAmazonRedshiftTableDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedAmazonRedshiftTableDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRedshiftTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java index d9c44432fe3b..55519786c691 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3CompatibleLinkedServiceTypeProperties.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon S3 Compatible linked service properties. */ @Fluent -public final class AmazonS3CompatibleLinkedServiceTypeProperties { +public final class AmazonS3CompatibleLinkedServiceTypeProperties + implements JsonSerializable { /* * The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "accessKeyId") private Object accessKeyId; /* * The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. */ - @JsonProperty(value = "secretAccessKey") private SecretBase secretAccessKey; /* @@ -31,21 +34,18 @@ public final class AmazonS3CompatibleLinkedServiceTypeProperties { * property; change it only if you want to try a different service endpoint or want to switch between https and * http. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* * If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "forcePathStyle") private Object forcePathStyle; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -176,4 +176,54 @@ public void validate() { secretAccessKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accessKeyId", this.accessKeyId); + jsonWriter.writeJsonField("secretAccessKey", this.secretAccessKey); + jsonWriter.writeUntypedField("serviceUrl", this.serviceUrl); + jsonWriter.writeUntypedField("forcePathStyle", this.forcePathStyle); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3CompatibleLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3CompatibleLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonS3CompatibleLinkedServiceTypeProperties. 
+ */ + public static AmazonS3CompatibleLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3CompatibleLinkedServiceTypeProperties deserializedAmazonS3CompatibleLinkedServiceTypeProperties + = new AmazonS3CompatibleLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accessKeyId".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedServiceTypeProperties.accessKeyId = reader.readUntyped(); + } else if ("secretAccessKey".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedServiceTypeProperties.secretAccessKey + = SecretBase.fromJson(reader); + } else if ("serviceUrl".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedServiceTypeProperties.serviceUrl = reader.readUntyped(); + } else if ("forcePathStyle".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedServiceTypeProperties.forcePathStyle = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonS3CompatibleLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java index 43bfe345ff1b..c8ca7abdf8f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3DatasetTypeProperties.java @@ 
-6,61 +6,57 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon S3 dataset properties. */ @Fluent -public final class AmazonS3DatasetTypeProperties { +public final class AmazonS3DatasetTypeProperties implements JsonSerializable { /* * The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "bucketName", required = true) private Object bucketName; /* * The key of the Amazon S3 object. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "key") private Object key; /* * The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * The version for the S3 object. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "version") private Object version; /* * The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of S3 object's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * The format of files. */ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * The data compression method used for the Amazon S3 object. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -257,4 +253,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonS3DatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + jsonWriter.writeUntypedField("key", this.key); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("version", this.version); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3DatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3DatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonS3DatasetTypeProperties. 
+ */ + public static AmazonS3DatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3DatasetTypeProperties deserializedAmazonS3DatasetTypeProperties + = new AmazonS3DatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("bucketName".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.bucketName = reader.readUntyped(); + } else if ("key".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.key = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.prefix = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.version = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedAmazonS3DatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonS3DatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java index aaafe4cabf66..49ade57c577d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonS3LinkedServiceTypeProperties.java @@ -5,32 +5,34 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Amazon S3 linked service properties. */ @Fluent -public final class AmazonS3LinkedServiceTypeProperties { +public final class AmazonS3LinkedServiceTypeProperties + implements JsonSerializable { /* * The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "accessKeyId") private Object accessKeyId; /* * The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */ - @JsonProperty(value = "secretAccessKey") private SecretBase secretAccessKey; /* @@ -38,20 +40,17 @@ public final class AmazonS3LinkedServiceTypeProperties { * if you want to try a different service endpoint or want to switch between https and http. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* * The session token for the S3 temporary security credential. 
*/ - @JsonProperty(value = "sessionToken") private SecretBase sessionToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -205,4 +204,56 @@ public void validate() { sessionToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("accessKeyId", this.accessKeyId); + jsonWriter.writeJsonField("secretAccessKey", this.secretAccessKey); + jsonWriter.writeUntypedField("serviceUrl", this.serviceUrl); + jsonWriter.writeJsonField("sessionToken", this.sessionToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3LinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonS3LinkedServiceTypeProperties. 
+ */ + public static AmazonS3LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3LinkedServiceTypeProperties deserializedAmazonS3LinkedServiceTypeProperties + = new AmazonS3LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("authenticationType".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("accessKeyId".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.accessKeyId = reader.readUntyped(); + } else if ("secretAccessKey".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.secretAccessKey = SecretBase.fromJson(reader); + } else if ("serviceUrl".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.serviceUrl = reader.readUntyped(); + } else if ("sessionToken".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.sessionToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAmazonS3LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonS3LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java index bd82a9826ec1..a38d510ec994 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppFiguresLinkedServiceTypeProperties.java @@ -6,30 +6,32 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * AppFigures linked service type properties. */ @Fluent -public final class AppFiguresLinkedServiceTypeProperties { +public final class AppFiguresLinkedServiceTypeProperties + implements JsonSerializable { /* * The username of the Appfigures source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName", required = true) private Object username; /* * The password of the AppFigures source. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /* * The client key for the AppFigures source. */ - @JsonProperty(value = "clientKey", required = true) private SecretBase clientKey; /** @@ -128,4 +130,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AppFiguresLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("clientKey", this.clientKey); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AppFiguresLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AppFiguresLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AppFiguresLinkedServiceTypeProperties. + */ + public static AppFiguresLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AppFiguresLinkedServiceTypeProperties deserializedAppFiguresLinkedServiceTypeProperties + = new AppFiguresLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("userName".equals(fieldName)) { + deserializedAppFiguresLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAppFiguresLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("clientKey".equals(fieldName)) { + deserializedAppFiguresLinkedServiceTypeProperties.clientKey = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAppFiguresLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java index 30f90a79862c..95d8432ac501 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AppendVariableActivityTypeProperties.java 
@@ -5,24 +5,27 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * AppendVariable activity properties. */ @Fluent -public final class AppendVariableActivityTypeProperties { +public final class AppendVariableActivityTypeProperties + implements JsonSerializable { /* * Name of the variable whose value needs to be appended to. */ - @JsonProperty(value = "variableName") private String variableName; /* * Value to be appended. Type: could be a static value matching type of the variable item or Expression with * resultType matching type of the variable item */ - @JsonProperty(value = "value") private Object value; /** @@ -80,4 +83,44 @@ public AppendVariableActivityTypeProperties withValue(Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("variableName", this.variableName); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AppendVariableActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AppendVariableActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AppendVariableActivityTypeProperties. 
+ */ + public static AppendVariableActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AppendVariableActivityTypeProperties deserializedAppendVariableActivityTypeProperties + = new AppendVariableActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("variableName".equals(fieldName)) { + deserializedAppendVariableActivityTypeProperties.variableName = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedAppendVariableActivityTypeProperties.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAppendVariableActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java index abe77c64b662..5799378ce2da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AsanaLinkedServiceTypeProperties.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Asana linked service type properties. 
*/ @Fluent -public final class AsanaLinkedServiceTypeProperties { +public final class AsanaLinkedServiceTypeProperties implements JsonSerializable { /* * The api token for the Asana source. */ - @JsonProperty(value = "apiToken", required = true) private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -91,4 +93,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AsanaLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AsanaLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AsanaLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AsanaLinkedServiceTypeProperties. 
+ */ + public static AsanaLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AsanaLinkedServiceTypeProperties deserializedAsanaLinkedServiceTypeProperties + = new AsanaLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("apiToken".equals(fieldName)) { + deserializedAsanaLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAsanaLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAsanaLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java index 46ff88791532..5d97039923f4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AvroDatasetTypeProperties.java @@ -6,30 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Avro dataset properties. 
*/ @Fluent -public final class AvroDatasetTypeProperties { +public final class AvroDatasetTypeProperties implements JsonSerializable { /* * The location of the avro storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The data avroCompressionCodec. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "avroCompressionCodec") private Object avroCompressionCodec; /* * The avroCompressionLevel property. */ - @JsonProperty(value = "avroCompressionLevel") private Integer avroCompressionLevel; /** @@ -116,4 +117,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AvroDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("avroCompressionCodec", this.avroCompressionCodec); + jsonWriter.writeNumberField("avroCompressionLevel", this.avroCompressionLevel); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AvroDatasetTypeProperties. 
+ */ + public static AvroDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroDatasetTypeProperties deserializedAvroDatasetTypeProperties = new AvroDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedAvroDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("avroCompressionCodec".equals(fieldName)) { + deserializedAvroDatasetTypeProperties.avroCompressionCodec = reader.readUntyped(); + } else if ("avroCompressionLevel".equals(fieldName)) { + deserializedAvroDatasetTypeProperties.avroCompressionLevel = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedAvroDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java index c000e405ae95..c78e5460d7da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzPowerShellSetupTypeProperties.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Installation of Azure PowerShell type properties. 
*/ @Fluent -public final class AzPowerShellSetupTypeProperties { +public final class AzPowerShellSetupTypeProperties implements JsonSerializable { /* * The required version of Azure PowerShell to install. */ - @JsonProperty(value = "version", required = true) private String version; /** @@ -59,4 +62,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzPowerShellSetupTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("version", this.version); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzPowerShellSetupTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzPowerShellSetupTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzPowerShellSetupTypeProperties. 
+ */ + public static AzPowerShellSetupTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzPowerShellSetupTypeProperties deserializedAzPowerShellSetupTypeProperties + = new AzPowerShellSetupTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("version".equals(fieldName)) { + deserializedAzPowerShellSetupTypeProperties.version = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzPowerShellSetupTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java index 3a88f604fae3..c98aad961407 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBatchLinkedServiceTypeProperties.java @@ -6,57 +6,55 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Batch linked service properties. 
*/ @Fluent -public final class AzureBatchLinkedServiceTypeProperties { +public final class AzureBatchLinkedServiceTypeProperties + implements JsonSerializable { /* * The Azure Batch account name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accountName", required = true) private Object accountName; /* * The Azure Batch account access key. */ - @JsonProperty(value = "accessKey") private SecretBase accessKey; /* * The Azure Batch URI. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "batchUri", required = true) private Object batchUri; /* * The Azure Batch pool name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "poolName", required = true) private Object poolName; /* * The Azure Storage linked service reference. */ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -244,4 +242,61 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureBatchLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accountName", this.accountName); + jsonWriter.writeUntypedField("batchUri", this.batchUri); + jsonWriter.writeUntypedField("poolName", this.poolName); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeJsonField("accessKey", this.accessKey); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBatchLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBatchLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureBatchLinkedServiceTypeProperties. 
+ */ + public static AzureBatchLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBatchLinkedServiceTypeProperties deserializedAzureBatchLinkedServiceTypeProperties + = new AzureBatchLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountName".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.accountName = reader.readUntyped(); + } else if ("batchUri".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.batchUri = reader.readUntyped(); + } else if ("poolName".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.poolName = reader.readUntyped(); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.linkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("accessKey".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.accessKey = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureBatchLinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBatchLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java index f2bc706f1e21..03468ad49d20 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobDatasetTypeProperties.java @@ -5,55 +5,52 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Blob dataset properties. */ @Fluent -public final class AzureBlobDatasetTypeProperties { +public final class AzureBlobDatasetTypeProperties implements JsonSerializable { /* * The path of the Azure Blob storage. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * The root of blob path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableRootLocation") private Object tableRootLocation; /* * The name of the Azure Blob. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileName") private Object fileName; /* * The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * The format of the Azure Blob storage. 
*/ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * The data compression method used for the blob storage. */ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -221,4 +218,59 @@ public void validate() { compression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeUntypedField("tableRootLocation", this.tableRootLocation); + jsonWriter.writeUntypedField("fileName", this.fileName); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobDatasetTypeProperties. 
+ */ + public static AzureBlobDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobDatasetTypeProperties deserializedAzureBlobDatasetTypeProperties + = new AzureBlobDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.folderPath = reader.readUntyped(); + } else if ("tableRootLocation".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.tableRootLocation = reader.readUntyped(); + } else if ("fileName".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.fileName = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedAzureBlobDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBlobDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSDatasetTypeProperties.java index 74ef179e7fc7..ef25dfaee8f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSDatasetTypeProperties.java @@ -5,37 +5,37 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Lake Storage Gen2 dataset properties. */ @Fluent -public final class AzureBlobFSDatasetTypeProperties { +public final class AzureBlobFSDatasetTypeProperties implements JsonSerializable { /* * The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileName") private Object fileName; /* * The format of the Azure Data Lake Storage Gen2 storage. */ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * The data compression method used for the blob storage. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -141,4 +141,50 @@ public void validate() { compression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeUntypedField("fileName", this.fileName); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSDatasetTypeProperties. + */ + public static AzureBlobFSDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSDatasetTypeProperties deserializedAzureBlobFSDatasetTypeProperties + = new AzureBlobFSDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureBlobFSDatasetTypeProperties.folderPath = reader.readUntyped(); + } else if ("fileName".equals(fieldName)) { + deserializedAzureBlobFSDatasetTypeProperties.fileName = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedAzureBlobFSDatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedAzureBlobFSDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return 
deserializedAzureBlobFSDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java index 8f82b9b4cbfc..c607921c1bf1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobFSLinkedServiceTypeProperties.java @@ -5,45 +5,45 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Lake Storage Gen2 linked service properties. */ @Fluent -public final class AzureBlobFSLinkedServiceTypeProperties { +public final class AzureBlobFSLinkedServiceTypeProperties + implements JsonSerializable { /* * Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url") private Object url; /* * Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accountKey") private Object accountKey; /* * The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or * Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -51,27 +51,23 @@ public final class AzureBlobFSLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -80,19 +76,16 @@ public final class AzureBlobFSLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * SAS URI of the Azure Data Lake Storage Gen2 service. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/ - @JsonProperty(value = "sasUri") private Object sasUri; /* * The Azure key vault secret reference of sasToken in sas uri. */ - @JsonProperty(value = "sasToken") private SecretBase sasToken; /** @@ -390,4 +383,78 @@ public void validate() { sasToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("accountKey", this.accountKey); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeUntypedField("sasUri", this.sasUri); + jsonWriter.writeJsonField("sasToken", this.sasToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSLinkedServiceTypeProperties. 
+ */ + public static AzureBlobFSLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSLinkedServiceTypeProperties deserializedAzureBlobFSLinkedServiceTypeProperties + = new AzureBlobFSLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("accountKey".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.accountKey = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("sasUri".equals(fieldName)) { + 
deserializedAzureBlobFSLinkedServiceTypeProperties.sasUri = reader.readUntyped(); + } else if ("sasToken".equals(fieldName)) { + deserializedAzureBlobFSLinkedServiceTypeProperties.sasToken = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBlobFSLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java index 6fcd32b211df..d9359edf275e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureBlobStorageLinkedServiceTypeProperties.java @@ -5,68 +5,65 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; import com.azure.resourcemanager.datafactory.models.AzureStorageAuthenticationType; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Blob Storage linked service properties. */ @Fluent -public final class AzureBlobStorageLinkedServiceTypeProperties { +public final class AzureBlobStorageLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. 
It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString * or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of accountKey in connection string. */ - @JsonProperty(value = "accountKey") private AzureKeyVaultSecretReference accountKey; /* * SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint * property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "sasUri") private Object sasUri; /* * The Azure key vault secret reference of sasToken in sas uri. */ - @JsonProperty(value = "sasToken") private AzureKeyVaultSecretReference sasToken; /* * Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri * property. */ - @JsonProperty(value = "serviceEndpoint") private Object serviceEndpoint; /* * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Azure SQL Data Warehouse. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -74,40 +71,34 @@ public final class AzureBlobStorageLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * Specify the kind of your storage account. 
Allowed values are: Storage (general purpose v1), StorageV2 (general * purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accountKind") private Object accountKind; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private AzureStorageAuthenticationType authenticationType; /* * Container uri of the Azure Blob Storage resource only support for anonymous access. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "containerUri") private Object containerUri; /** @@ -440,4 +431,86 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("accountKey", this.accountKey); + jsonWriter.writeUntypedField("sasUri", this.sasUri); + jsonWriter.writeJsonField("sasToken", this.sasToken); + jsonWriter.writeUntypedField("serviceEndpoint", this.serviceEndpoint); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeUntypedField("accountKind", this.accountKind); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + 
jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("containerUri", this.containerUri); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageLinkedServiceTypeProperties. + */ + public static AzureBlobStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageLinkedServiceTypeProperties deserializedAzureBlobStorageLinkedServiceTypeProperties + = new AzureBlobStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("accountKey".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.accountKey + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("sasUri".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.sasUri = reader.readUntyped(); + } else if ("sasToken".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.sasToken + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("serviceEndpoint".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.serviceEndpoint = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + 
deserializedAzureBlobStorageLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("accountKind".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.accountKind = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("authenticationType".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.authenticationType + = AzureStorageAuthenticationType.fromString(reader.getString()); + } else if ("containerUri".equals(fieldName)) { + deserializedAzureBlobStorageLinkedServiceTypeProperties.containerUri = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBlobStorageLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java index 213912d2a690..7146b8ff2fd7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerCommandActivityTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Data Explorer command activity properties. */ @Fluent -public final class AzureDataExplorerCommandActivityTypeProperties { +public final class AzureDataExplorerCommandActivityTypeProperties + implements JsonSerializable { /* * A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "command", required = true) private Object command; /* * Control command timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) */ - @JsonProperty(value = "commandTimeout") private Object commandTimeout; /** @@ -91,4 +94,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerCommandActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("command", this.command); + jsonWriter.writeUntypedField("commandTimeout", this.commandTimeout); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerCommandActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AzureDataExplorerCommandActivityTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataExplorerCommandActivityTypeProperties. + */ + public static AzureDataExplorerCommandActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerCommandActivityTypeProperties deserializedAzureDataExplorerCommandActivityTypeProperties + = new AzureDataExplorerCommandActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("command".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivityTypeProperties.command = reader.readUntyped(); + } else if ("commandTimeout".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivityTypeProperties.commandTimeout = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDataExplorerCommandActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerDatasetTypeProperties.java index b4a6d0e3bd06..1bb13d7e0a05 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerDatasetTypeProperties.java @@ -5,17 +5,21 @@ package 
com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Data Explorer (Kusto) dataset properties. */ @Fluent -public final class AzureDataExplorerDatasetTypeProperties { +public final class AzureDataExplorerDatasetTypeProperties + implements JsonSerializable { /* * The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -53,4 +57,41 @@ public AzureDataExplorerDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataExplorerDatasetTypeProperties. 
+ */ + public static AzureDataExplorerDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerDatasetTypeProperties deserializedAzureDataExplorerDatasetTypeProperties + = new AzureDataExplorerDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("table".equals(fieldName)) { + deserializedAzureDataExplorerDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDataExplorerDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java index df4181273130..4d0e6928bac0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataExplorerLinkedServiceTypeProperties.java @@ -6,52 +6,51 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Explorer (Kusto) linked service properties. 
*/ @Fluent -public final class AzureDataExplorerLinkedServiceTypeProperties { +public final class AzureDataExplorerLinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format * https://..kusto.windows.net. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Kusto. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * Database name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database", required = true) private Object database; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -213,4 +212,59 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataExplorerLinkedServiceTypeProperties. 
+ */ + public static AzureDataExplorerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerLinkedServiceTypeProperties deserializedAzureDataExplorerLinkedServiceTypeProperties + = new AzureDataExplorerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedAzureDataExplorerLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDataExplorerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java index 58ad7c72fa87..ab46a2b4957c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeAnalyticsLinkedServiceTypeProperties.java @@ -6,65 +6,62 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Lake Analytics linked service properties. */ @Fluent -public final class AzureDataLakeAnalyticsLinkedServiceTypeProperties { +public final class AzureDataLakeAnalyticsLinkedServiceTypeProperties + implements JsonSerializable { /* * The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accountName", required = true) private Object accountName; /* * The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The Key of the application used to authenticate against the Azure Data Lake Analytics account. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant", required = true) private Object tenant; /* * Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "subscriptionId") private Object subscriptionId; /* * Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "resourceGroupName") private Object resourceGroupName; /* * Azure Data Lake Analytics URI Type: string (or Expression with resultType string). */ - @JsonProperty(value = "dataLakeAnalyticsUri") private Object dataLakeAnalyticsUri; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -272,4 +269,68 @@ public void validate() { private static final ClientLogger LOGGER = new ClientLogger(AzureDataLakeAnalyticsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accountName", this.accountName); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("subscriptionId", this.subscriptionId); + jsonWriter.writeUntypedField("resourceGroupName", this.resourceGroupName); + jsonWriter.writeUntypedField("dataLakeAnalyticsUri", this.dataLakeAnalyticsUri); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeAnalyticsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeAnalyticsLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataLakeAnalyticsLinkedServiceTypeProperties. + */ + public static AzureDataLakeAnalyticsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeAnalyticsLinkedServiceTypeProperties deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties + = new AzureDataLakeAnalyticsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountName".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.accountName = reader.readUntyped(); + } else if ("tenant".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.servicePrincipalId + = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("subscriptionId".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.subscriptionId = reader.readUntyped(); + } else if ("resourceGroupName".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.resourceGroupName + = reader.readUntyped(); + } else if ("dataLakeAnalyticsUri".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.dataLakeAnalyticsUri + = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return 
deserializedAzureDataLakeAnalyticsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreDatasetTypeProperties.java index e50b858b5a1e..abe8b202c289 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreDatasetTypeProperties.java @@ -5,37 +5,38 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Lake Store dataset properties. */ @Fluent -public final class AzureDataLakeStoreDatasetTypeProperties { +public final class AzureDataLakeStoreDatasetTypeProperties + implements JsonSerializable { /* * Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileName") private Object fileName; /* * The format of the Data Lake Store. 
*/ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * The data compression method used for the item(s) in the Azure Data Lake Store. */ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -141,4 +142,51 @@ public void validate() { compression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeUntypedField("fileName", this.fileName); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreDatasetTypeProperties. 
+ */ + public static AzureDataLakeStoreDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreDatasetTypeProperties deserializedAzureDataLakeStoreDatasetTypeProperties + = new AzureDataLakeStoreDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureDataLakeStoreDatasetTypeProperties.folderPath = reader.readUntyped(); + } else if ("fileName".equals(fieldName)) { + deserializedAzureDataLakeStoreDatasetTypeProperties.fileName = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedAzureDataLakeStoreDatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedAzureDataLakeStoreDatasetTypeProperties.compression + = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDataLakeStoreDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java index 5d5c7a404534..ddeecc860546 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDataLakeStoreLinkedServiceTypeProperties.java @@ -6,39 +6,40 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Lake Store linked service properties. */ @Fluent -public final class AzureDataLakeStoreLinkedServiceTypeProperties { +public final class AzureDataLakeStoreLinkedServiceTypeProperties + implements JsonSerializable { /* * Data Lake Store service URI. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "dataLakeStoreUri", required = true) private Object dataLakeStoreUri; /* * The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The Key of the application used to authenticate against the Azure Data Lake Store account. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -46,40 +47,34 @@ public final class AzureDataLakeStoreLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * Data Lake Store account name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accountName") private Object accountName; /* * Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "subscriptionId") private Object subscriptionId; /* * Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "resourceGroupName") private Object resourceGroupName; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -326,4 +321,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataLakeStoreLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("dataLakeStoreUri", this.dataLakeStoreUri); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeUntypedField("accountName", this.accountName); + jsonWriter.writeUntypedField("subscriptionId", this.subscriptionId); + jsonWriter.writeUntypedField("resourceGroupName", this.resourceGroupName); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AzureDataLakeStoreLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreLinkedServiceTypeProperties. + */ + public static AzureDataLakeStoreLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreLinkedServiceTypeProperties deserializedAzureDataLakeStoreLinkedServiceTypeProperties + = new AzureDataLakeStoreLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataLakeStoreUri".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.dataLakeStoreUri = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("accountName".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.accountName = reader.readUntyped(); + } else if ("subscriptionId".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.subscriptionId = reader.readUntyped(); + } else if ("resourceGroupName".equals(fieldName)) { + 
deserializedAzureDataLakeStoreLinkedServiceTypeProperties.resourceGroupName = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDataLakeStoreLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java index c32e464f0019..aaf7a602e1fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDeltaLakeDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Databricks Delta Lake Dataset Properties. */ @Fluent -public final class AzureDatabricksDeltaLakeDatasetTypeProperties { +public final class AzureDatabricksDeltaLakeDatasetTypeProperties + implements JsonSerializable { /* * The name of delta table. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "table") private Object table; /* * The database name of delta table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database") private Object database; /** @@ -77,4 +80,44 @@ public AzureDatabricksDeltaLakeDatasetTypeProperties withDatabase(Object databas */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("database", this.database); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeDatasetTypeProperties. 
+ */ + public static AzureDatabricksDeltaLakeDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeDatasetTypeProperties deserializedAzureDatabricksDeltaLakeDatasetTypeProperties + = new AzureDatabricksDeltaLakeDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("table".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDatasetTypeProperties.table = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDatasetTypeProperties.database = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDatabricksDeltaLakeDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java index fc130bf2ad90..b097fadf1e84 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksDetltaLakeLinkedServiceTypeProperties.java @@ -6,53 +6,52 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Databricks Delta Lake linked service properties. */ @Fluent -public final class AzureDatabricksDetltaLakeLinkedServiceTypeProperties { +public final class AzureDatabricksDetltaLakeLinkedServiceTypeProperties + implements JsonSerializable { /* * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "domain", required = true) private Object domain; /* * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. * Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "clusterId") private Object clusterId; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "workspaceResourceId") private Object workspaceResourceId; /** @@ -214,4 +213,62 @@ public void validate() { private static final ClientLogger LOGGER = new ClientLogger(AzureDatabricksDetltaLakeLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeUntypedField("clusterId", this.clusterId); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("workspaceResourceId", this.workspaceResourceId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDetltaLakeLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDetltaLakeLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDatabricksDetltaLakeLinkedServiceTypeProperties. 
+ */ + public static AzureDatabricksDetltaLakeLinkedServiceTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDetltaLakeLinkedServiceTypeProperties deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties + = new AzureDatabricksDetltaLakeLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("domain".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.domain = reader.readUntyped(); + } else if ("accessToken".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.accessToken + = SecretBase.fromJson(reader); + } else if ("clusterId".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.clusterId = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("workspaceResourceId".equals(fieldName)) { + deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties.workspaceResourceId + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDatabricksDetltaLakeLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java index 76487042bba6..e265b16d8d06 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java @@ -6,56 +6,54 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Azure Databricks linked service properties. */ @Fluent -public final class AzureDatabricksLinkedServiceTypeProperties { +public final class AzureDatabricksLinkedServiceTypeProperties + implements JsonSerializable { /* * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "domain", required = true) private Object domain; /* * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "authentication") private Object authentication; /* * Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "workspaceResourceId") private Object workspaceResourceId; /* * The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "existingClusterId") private Object existingClusterId; /* * The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "instancePoolId") private Object instancePoolId; /* @@ -63,7 +61,6 @@ public final class AzureDatabricksLinkedServiceTypeProperties { * pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "newClusterVersion") private Object newClusterVersion; /* @@ -73,7 +70,6 @@ public final class AzureDatabricksLinkedServiceTypeProperties { * only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "newClusterNumOfWorker") private Object newClusterNumOfWorker; /* @@ -81,76 +77,62 @@ public final class AzureDatabricksLinkedServiceTypeProperties { * instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "newClusterNodeType") private Object newClusterNodeType; /* * A set of optional, user-specified Spark configuration key-value pairs. */ - @JsonProperty(value = "newClusterSparkConf") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map newClusterSparkConf; /* * A set of optional, user-specified Spark environment variables key-value pairs. 
*/ - @JsonProperty(value = "newClusterSparkEnvVars") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map newClusterSparkEnvVars; /* * Additional tags for cluster resources. This property is ignored in instance pool configurations. */ - @JsonProperty(value = "newClusterCustomTags") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map newClusterCustomTags; /* * Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "newClusterLogDestination") private Object newClusterLogDestination; /* * The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "newClusterDriverNodeType") private Object newClusterDriverNodeType; /* * User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType * array of strings). */ - @JsonProperty(value = "newClusterInitScripts") private Object newClusterInitScripts; /* * Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk * behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "newClusterEnableElasticDisk") private Object newClusterEnableElasticDisk; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string * (or Expression with resultType string). 
*/ - @JsonProperty(value = "policyId") private Object policyId; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -610,4 +592,107 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDatabricksLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeUntypedField("authentication", this.authentication); + jsonWriter.writeUntypedField("workspaceResourceId", this.workspaceResourceId); + jsonWriter.writeUntypedField("existingClusterId", this.existingClusterId); + jsonWriter.writeUntypedField("instancePoolId", this.instancePoolId); + jsonWriter.writeUntypedField("newClusterVersion", this.newClusterVersion); + jsonWriter.writeUntypedField("newClusterNumOfWorker", this.newClusterNumOfWorker); + jsonWriter.writeUntypedField("newClusterNodeType", this.newClusterNodeType); + jsonWriter.writeMapField("newClusterSparkConf", this.newClusterSparkConf, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("newClusterSparkEnvVars", this.newClusterSparkEnvVars, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("newClusterCustomTags", this.newClusterCustomTags, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("newClusterLogDestination", this.newClusterLogDestination); + jsonWriter.writeUntypedField("newClusterDriverNodeType", this.newClusterDriverNodeType); + jsonWriter.writeUntypedField("newClusterInitScripts", this.newClusterInitScripts); + jsonWriter.writeUntypedField("newClusterEnableElasticDisk", this.newClusterEnableElasticDisk); + jsonWriter.writeStringField("encryptedCredential", 
this.encryptedCredential); + jsonWriter.writeUntypedField("policyId", this.policyId); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDatabricksLinkedServiceTypeProperties. + */ + public static AzureDatabricksLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksLinkedServiceTypeProperties deserializedAzureDatabricksLinkedServiceTypeProperties + = new AzureDatabricksLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("domain".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.domain = reader.readUntyped(); + } else if ("accessToken".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("authentication".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.authentication = reader.readUntyped(); + } else if ("workspaceResourceId".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.workspaceResourceId = reader.readUntyped(); + } else if ("existingClusterId".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.existingClusterId = reader.readUntyped(); + } else if ("instancePoolId".equals(fieldName)) { + 
deserializedAzureDatabricksLinkedServiceTypeProperties.instancePoolId = reader.readUntyped(); + } else if ("newClusterVersion".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterVersion = reader.readUntyped(); + } else if ("newClusterNumOfWorker".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterNumOfWorker = reader.readUntyped(); + } else if ("newClusterNodeType".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterNodeType = reader.readUntyped(); + } else if ("newClusterSparkConf".equals(fieldName)) { + Map newClusterSparkConf = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterSparkConf = newClusterSparkConf; + } else if ("newClusterSparkEnvVars".equals(fieldName)) { + Map newClusterSparkEnvVars = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterSparkEnvVars + = newClusterSparkEnvVars; + } else if ("newClusterCustomTags".equals(fieldName)) { + Map newClusterCustomTags = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterCustomTags = newClusterCustomTags; + } else if ("newClusterLogDestination".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterLogDestination + = reader.readUntyped(); + } else if ("newClusterDriverNodeType".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterDriverNodeType + = reader.readUntyped(); + } else if ("newClusterInitScripts".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterInitScripts = reader.readUntyped(); + } else if ("newClusterEnableElasticDisk".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.newClusterEnableElasticDisk + = reader.readUntyped(); + } else if 
("encryptedCredential".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("policyId".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.policyId = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedAzureDatabricksLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureDatabricksLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java index c86b7def881d..2d0523f08a71 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFileStorageLinkedServiceTypeProperties.java @@ -5,79 +5,86 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; +import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure File Storage linked service properties. 
*/ @Fluent -public final class AzureFileStorageLinkedServiceTypeProperties { +public final class AzureFileStorageLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name of the server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "host") private Object host; /* * User ID to logon the server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userId") private Object userId; /* * Password to logon the server. */ - @JsonProperty(value = "password") private SecretBase password; /* * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of accountKey in connection string. */ - @JsonProperty(value = "accountKey") private AzureKeyVaultSecretReference accountKey; /* * SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, * SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "sasUri") private Object sasUri; /* * The Azure key vault secret reference of sasToken in sas uri. */ - @JsonProperty(value = "sasToken") private AzureKeyVaultSecretReference sasToken; /* * The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "fileShare") private Object fileShare; /* * The azure file share snapshot version. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "snapshot") private Object snapshot; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; + /* + * File service endpoint of the Azure File Storage resource. 
It is mutually exclusive with connectionString, sasUri + * property. + */ + private Object serviceEndpoint; + + /* + * The credential reference containing authentication information. + */ + private CredentialReference credential; + /** * Creates an instance of AzureFileStorageLinkedServiceTypeProperties class. */ @@ -294,6 +301,48 @@ public AzureFileStorageLinkedServiceTypeProperties withEncryptedCredential(Strin return this; } + /** + * Get the serviceEndpoint property: File service endpoint of the Azure File Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @return the serviceEndpoint value. + */ + public Object serviceEndpoint() { + return this.serviceEndpoint; + } + + /** + * Set the serviceEndpoint property: File service endpoint of the Azure File Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @param serviceEndpoint the serviceEndpoint value to set. + * @return the AzureFileStorageLinkedServiceTypeProperties object itself. + */ + public AzureFileStorageLinkedServiceTypeProperties withServiceEndpoint(Object serviceEndpoint) { + this.serviceEndpoint = serviceEndpoint; + return this; + } + + /** + * Get the credential property: The credential reference containing authentication information. + * + * @return the credential value. + */ + public CredentialReference credential() { + return this.credential; + } + + /** + * Set the credential property: The credential reference containing authentication information. + * + * @param credential the credential value to set. + * @return the AzureFileStorageLinkedServiceTypeProperties object itself. + */ + public AzureFileStorageLinkedServiceTypeProperties withCredential(CredentialReference credential) { + this.credential = credential; + return this; + } + /** * Validates the instance. 
* @@ -309,5 +358,81 @@ public void validate() { if (sasToken() != null) { sasToken().validate(); } + if (credential() != null) { + credential().validate(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("userId", this.userId); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("accountKey", this.accountKey); + jsonWriter.writeUntypedField("sasUri", this.sasUri); + jsonWriter.writeJsonField("sasToken", this.sasToken); + jsonWriter.writeUntypedField("fileShare", this.fileShare); + jsonWriter.writeUntypedField("snapshot", this.snapshot); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("serviceEndpoint", this.serviceEndpoint); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFileStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFileStorageLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureFileStorageLinkedServiceTypeProperties. 
+ */ + public static AzureFileStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFileStorageLinkedServiceTypeProperties deserializedAzureFileStorageLinkedServiceTypeProperties + = new AzureFileStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("userId".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.userId = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("connectionString".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("accountKey".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.accountKey + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("sasUri".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.sasUri = reader.readUntyped(); + } else if ("sasToken".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.sasToken + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("fileShare".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.fileShare = reader.readUntyped(); + } else if ("snapshot".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.snapshot = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("serviceEndpoint".equals(fieldName)) { + 
deserializedAzureFileStorageLinkedServiceTypeProperties.serviceEndpoint = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedAzureFileStorageLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureFileStorageLinkedServiceTypeProperties; + }); } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java index 053b1edb887a..c0625d819f1d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionActivityTypeProperties.java @@ -6,27 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureFunctionActivityMethod; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Azure Function activity type properties. */ @Fluent -public final class AzureFunctionActivityTypeProperties { +public final class AzureFunctionActivityTypeProperties + implements JsonSerializable { /* * Rest API method for target endpoint. */ - @JsonProperty(value = "method", required = true) private AzureFunctionActivityMethod method; /* * Name of the Function that the Azure Function Activity will call. 
Type: string (or Expression with resultType * string) */ - @JsonProperty(value = "functionName", required = true) private Object functionName; /* @@ -34,15 +36,12 @@ public final class AzureFunctionActivityTypeProperties { * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "headers") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map headers; /* * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET * method Type: string (or Expression with resultType string). */ - @JsonProperty(value = "body") private Object body; /** @@ -158,4 +157,53 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureFunctionActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("method", this.method == null ? null : this.method.toString()); + jsonWriter.writeUntypedField("functionName", this.functionName); + jsonWriter.writeMapField("headers", this.headers, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("body", this.body); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFunctionActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFunctionActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureFunctionActivityTypeProperties. 
+ */ + public static AzureFunctionActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFunctionActivityTypeProperties deserializedAzureFunctionActivityTypeProperties + = new AzureFunctionActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("method".equals(fieldName)) { + deserializedAzureFunctionActivityTypeProperties.method + = AzureFunctionActivityMethod.fromString(reader.getString()); + } else if ("functionName".equals(fieldName)) { + deserializedAzureFunctionActivityTypeProperties.functionName = reader.readUntyped(); + } else if ("headers".equals(fieldName)) { + Map headers = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedAzureFunctionActivityTypeProperties.headers = headers; + } else if ("body".equals(fieldName)) { + deserializedAzureFunctionActivityTypeProperties.body = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureFunctionActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java index 5478c6105bcb..a5c240c9d78f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureFunctionLinkedServiceTypeProperties.java @@ -6,52 +6,51 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Function linked service properties. */ @Fluent -public final class AzureFunctionLinkedServiceTypeProperties { +public final class AzureFunctionLinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of the Azure Function App. URL will be in the format https://.azurewebsites.net. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "functionAppUrl", required = true) private Object functionAppUrl; /* * Function or Host key for Azure Function App. */ - @JsonProperty(value = "functionKey") private SecretBase functionKey; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * Allowed token audiences for azure function. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "resourceId") private Object resourceId; /* * Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "authentication") private Object authentication; /** @@ -208,4 +207,58 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureFunctionLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("functionAppUrl", this.functionAppUrl); + jsonWriter.writeJsonField("functionKey", this.functionKey); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("resourceId", this.resourceId); + jsonWriter.writeUntypedField("authentication", this.authentication); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFunctionLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFunctionLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureFunctionLinkedServiceTypeProperties. 
+ */ + public static AzureFunctionLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFunctionLinkedServiceTypeProperties deserializedAzureFunctionLinkedServiceTypeProperties + = new AzureFunctionLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("functionAppUrl".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.functionAppUrl = reader.readUntyped(); + } else if ("functionKey".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.functionKey = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("resourceId".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.resourceId = reader.readUntyped(); + } else if ("authentication".equals(fieldName)) { + deserializedAzureFunctionLinkedServiceTypeProperties.authentication = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureFunctionLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java index 9098fe63cbe3..3cb36cb36265 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureKeyVaultLinkedServiceTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Key Vault linked service properties. */ @Fluent -public final class AzureKeyVaultLinkedServiceTypeProperties { +public final class AzureKeyVaultLinkedServiceTypeProperties + implements JsonSerializable { /* * The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "baseUrl", required = true) private Object baseUrl; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -92,4 +95,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureKeyVaultLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("baseUrl", this.baseUrl); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureKeyVaultLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureKeyVaultLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureKeyVaultLinkedServiceTypeProperties. + */ + public static AzureKeyVaultLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureKeyVaultLinkedServiceTypeProperties deserializedAzureKeyVaultLinkedServiceTypeProperties + = new AzureKeyVaultLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("baseUrl".equals(fieldName)) { + deserializedAzureKeyVaultLinkedServiceTypeProperties.baseUrl = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedAzureKeyVaultLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureKeyVaultLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java index fa6c9efba211..f2085ec8413c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLBatchExecutionActivityTypeProperties.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureMLWebServiceFile; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Azure ML Batch Execution activity properties. */ @Fluent -public final class AzureMLBatchExecutionActivityTypeProperties { +public final class AzureMLBatchExecutionActivityTypeProperties + implements JsonSerializable { /* * Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web * service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters * property of the Azure ML batch execution request. */ - @JsonProperty(value = "globalParameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map globalParameters; /* @@ -29,8 +31,6 @@ public final class AzureMLBatchExecutionActivityTypeProperties { * specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the * Azure ML batch execution request. */ - @JsonProperty(value = "webServiceOutputs") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map webServiceOutputs; /* @@ -38,8 +38,6 @@ public final class AzureMLBatchExecutionActivityTypeProperties { * specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the * Azure ML batch execution request. 
*/ - @JsonProperty(value = "webServiceInputs") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map webServiceInputs; /** @@ -143,4 +141,55 @@ public void validate() { }); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeMapField("globalParameters", this.globalParameters, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("webServiceOutputs", this.webServiceOutputs, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("webServiceInputs", this.webServiceInputs, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLBatchExecutionActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLBatchExecutionActivityTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMLBatchExecutionActivityTypeProperties. 
+ */ + public static AzureMLBatchExecutionActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLBatchExecutionActivityTypeProperties deserializedAzureMLBatchExecutionActivityTypeProperties + = new AzureMLBatchExecutionActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("globalParameters".equals(fieldName)) { + Map globalParameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedAzureMLBatchExecutionActivityTypeProperties.globalParameters = globalParameters; + } else if ("webServiceOutputs".equals(fieldName)) { + Map webServiceOutputs + = reader.readMap(reader1 -> AzureMLWebServiceFile.fromJson(reader1)); + deserializedAzureMLBatchExecutionActivityTypeProperties.webServiceOutputs = webServiceOutputs; + } else if ("webServiceInputs".equals(fieldName)) { + Map webServiceInputs + = reader.readMap(reader1 -> AzureMLWebServiceFile.fromJson(reader1)); + deserializedAzureMLBatchExecutionActivityTypeProperties.webServiceInputs = webServiceInputs; + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLBatchExecutionActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java index 56645b11f502..3e21e559539a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLExecutePipelineActivityTypeProperties.java @@ -5,36 +5,37 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure ML Execute Pipeline activity properties. */ @Fluent -public final class AzureMLExecutePipelineActivityTypeProperties { +public final class AzureMLExecutePipelineActivityTypeProperties + implements JsonSerializable { /* * ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "mlPipelineId") private Object mlPipelineId; /* * ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "mlPipelineEndpointId") private Object mlPipelineEndpointId; /* * Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "version") private Object version; /* * Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property * of the published pipeline execution request. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "experimentName") private Object experimentName; /* @@ -42,7 +43,6 @@ public final class AzureMLExecutePipelineActivityTypeProperties { * parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the * published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). 
*/ - @JsonProperty(value = "mlPipelineParameters") private Object mlPipelineParameters; /* @@ -50,14 +50,12 @@ public final class AzureMLExecutePipelineActivityTypeProperties { * dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with * resultType object). */ - @JsonProperty(value = "dataPathAssignments") private Object dataPathAssignments; /* * The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the * published pipeline execution request. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "mlParentRunId") private Object mlParentRunId; /* @@ -65,7 +63,6 @@ public final class AzureMLExecutePipelineActivityTypeProperties { * in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression * with resultType boolean). */ - @JsonProperty(value = "continueOnStepFailure") private Object continueOnStepFailure; /** @@ -269,4 +266,65 @@ public AzureMLExecutePipelineActivityTypeProperties withContinueOnStepFailure(Ob */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("mlPipelineId", this.mlPipelineId); + jsonWriter.writeUntypedField("mlPipelineEndpointId", this.mlPipelineEndpointId); + jsonWriter.writeUntypedField("version", this.version); + jsonWriter.writeUntypedField("experimentName", this.experimentName); + jsonWriter.writeUntypedField("mlPipelineParameters", this.mlPipelineParameters); + jsonWriter.writeUntypedField("dataPathAssignments", this.dataPathAssignments); + jsonWriter.writeUntypedField("mlParentRunId", this.mlParentRunId); + jsonWriter.writeUntypedField("continueOnStepFailure", this.continueOnStepFailure); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLExecutePipelineActivityTypeProperties 
from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLExecutePipelineActivityTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMLExecutePipelineActivityTypeProperties. + */ + public static AzureMLExecutePipelineActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLExecutePipelineActivityTypeProperties deserializedAzureMLExecutePipelineActivityTypeProperties + = new AzureMLExecutePipelineActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("mlPipelineId".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.mlPipelineId = reader.readUntyped(); + } else if ("mlPipelineEndpointId".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.mlPipelineEndpointId + = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.version = reader.readUntyped(); + } else if ("experimentName".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.experimentName = reader.readUntyped(); + } else if ("mlPipelineParameters".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.mlPipelineParameters + = reader.readUntyped(); + } else if ("dataPathAssignments".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.dataPathAssignments = reader.readUntyped(); + } else if ("mlParentRunId".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.mlParentRunId = reader.readUntyped(); + } else if ("continueOnStepFailure".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivityTypeProperties.continueOnStepFailure + = 
reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLExecutePipelineActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java index 1acffb8830fe..9501cad27195 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLLinkedServiceTypeProperties.java @@ -6,67 +6,63 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure ML Studio Web Service linked service properties. */ @Fluent -public final class AzureMLLinkedServiceTypeProperties { +public final class AzureMLLinkedServiceTypeProperties implements JsonSerializable { /* * The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "mlEndpoint", required = true) private Object mlEndpoint; /* * The API key for accessing the Azure ML model endpoint. */ - @JsonProperty(value = "apiKey", required = true) private SecretBase apiKey; /* * The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "updateResourceEndpoint") private Object updateResourceEndpoint; /* * The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML * Studio web service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML * Studio web service. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "authentication") private Object authentication; /** @@ -273,4 +269,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("mlEndpoint", this.mlEndpoint); + jsonWriter.writeJsonField("apiKey", this.apiKey); + jsonWriter.writeUntypedField("updateResourceEndpoint", this.updateResourceEndpoint); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("authentication", this.authentication); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLLinkedServiceTypeProperties. 
+ */ + public static AzureMLLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLLinkedServiceTypeProperties deserializedAzureMLLinkedServiceTypeProperties + = new AzureMLLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("mlEndpoint".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.mlEndpoint = reader.readUntyped(); + } else if ("apiKey".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.apiKey = SecretBase.fromJson(reader); + } else if ("updateResourceEndpoint".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.updateResourceEndpoint = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("authentication".equals(fieldName)) { + deserializedAzureMLLinkedServiceTypeProperties.authentication = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java index 
a093137783d3..9e976f710623 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLServiceLinkedServiceTypeProperties.java @@ -6,65 +6,62 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure ML Service linked service properties. */ @Fluent -public final class AzureMLServiceLinkedServiceTypeProperties { +public final class AzureMLServiceLinkedServiceTypeProperties + implements JsonSerializable { /* * Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "subscriptionId", required = true) private Object subscriptionId; /* * Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "resourceGroupName", required = true) private Object resourceGroupName; /* * Azure ML Service workspace name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "mlWorkspaceName", required = true) private Object mlWorkspaceName; /* * Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "authentication") private Object authentication; /* * The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service * pipeline. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against the endpoint of a published Azure ML Service * pipeline. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -276,4 +273,64 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLServiceLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("subscriptionId", this.subscriptionId); + jsonWriter.writeUntypedField("resourceGroupName", this.resourceGroupName); + jsonWriter.writeUntypedField("mlWorkspaceName", this.mlWorkspaceName); + jsonWriter.writeUntypedField("authentication", this.authentication); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLServiceLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLServiceLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLServiceLinkedServiceTypeProperties. + */ + public static AzureMLServiceLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLServiceLinkedServiceTypeProperties deserializedAzureMLServiceLinkedServiceTypeProperties + = new AzureMLServiceLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("subscriptionId".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.subscriptionId = reader.readUntyped(); + } else if ("resourceGroupName".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.resourceGroupName = reader.readUntyped(); + } else if ("mlWorkspaceName".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.mlWorkspaceName = reader.readUntyped(); + } else if ("authentication".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.authentication = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureMLServiceLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLServiceLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java index 2bca2dace3be..2a671f7c0ee7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMLUpdateResourceActivityTypeProperties.java @@ -6,32 +6,34 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure ML Update Resource activity properties. */ @Fluent -public final class AzureMLUpdateResourceActivityTypeProperties { +public final class AzureMLUpdateResourceActivityTypeProperties + implements JsonSerializable { /* * Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "trainedModelName", required = true) private Object trainedModelName; /* * Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. */ - @JsonProperty(value = "trainedModelLinkedServiceName", required = true) private LinkedServiceReference trainedModelLinkedServiceName; /* * The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the * update operation. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "trainedModelFilePath", required = true) private Object trainedModelFilePath; /** @@ -135,4 +137,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLUpdateResourceActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("trainedModelName", this.trainedModelName); + jsonWriter.writeJsonField("trainedModelLinkedServiceName", this.trainedModelLinkedServiceName); + jsonWriter.writeUntypedField("trainedModelFilePath", this.trainedModelFilePath); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLUpdateResourceActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLUpdateResourceActivityTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLUpdateResourceActivityTypeProperties. 
+ */ + public static AzureMLUpdateResourceActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLUpdateResourceActivityTypeProperties deserializedAzureMLUpdateResourceActivityTypeProperties + = new AzureMLUpdateResourceActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("trainedModelName".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivityTypeProperties.trainedModelName = reader.readUntyped(); + } else if ("trainedModelLinkedServiceName".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivityTypeProperties.trainedModelLinkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("trainedModelFilePath".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivityTypeProperties.trainedModelFilePath = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLUpdateResourceActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java index 1d01839c7e17..6e5166930c70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMariaDBLinkedServiceTypeProperties.java @@ -5,31 +5,33 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Database for MariaDB linked service properties. */ @Fluent -public final class AzureMariaDBLinkedServiceTypeProperties { +public final class AzureMariaDBLinkedServiceTypeProperties + implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "pwd") private AzureKeyVaultSecretReference pwd; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +114,48 @@ public void validate() { pwd().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("pwd", this.pwd); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMariaDBLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMariaDBLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMariaDBLinkedServiceTypeProperties. 
+ */ + public static AzureMariaDBLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMariaDBLinkedServiceTypeProperties deserializedAzureMariaDBLinkedServiceTypeProperties + = new AzureMariaDBLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzureMariaDBLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("pwd".equals(fieldName)) { + deserializedAzureMariaDBLinkedServiceTypeProperties.pwd + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureMariaDBLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMariaDBLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java index b4799890fce3..ce03896bd73e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure MySQL database linked service properties. */ @Fluent -public final class AzureMySqlLinkedServiceTypeProperties { +public final class AzureMySqlLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -120,4 +122,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMySqlLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the AzureMySqlLinkedServiceTypeProperties. + */ + public static AzureMySqlLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlLinkedServiceTypeProperties deserializedAzureMySqlLinkedServiceTypeProperties + = new AzureMySqlLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzureMySqlLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzureMySqlLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureMySqlLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMySqlLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlTableDatasetTypeProperties.java index 8ca7de084b10..2c1bd57a2a15 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureMySqlTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure MySQL database dataset properties. */ @Fluent -public final class AzureMySqlTableDatasetTypeProperties { +public final class AzureMySqlTableDatasetTypeProperties + implements JsonSerializable { /* * The Azure MySQL database table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /* * The name of Azure MySQL database table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +84,44 @@ public AzureMySqlTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMySqlTableDatasetTypeProperties. 
+ */ + public static AzureMySqlTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlTableDatasetTypeProperties deserializedAzureMySqlTableDatasetTypeProperties + = new AzureMySqlTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzureMySqlTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAzureMySqlTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMySqlTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java index 9a19a3787ccc..fd1991ac18d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlLinkedServiceTypeProperties.java @@ -5,31 +5,33 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure PostgreSQL linked service properties. 
*/ @Fluent -public final class AzurePostgreSqlLinkedServiceTypeProperties { +public final class AzurePostgreSqlLinkedServiceTypeProperties + implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +114,48 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzurePostgreSqlLinkedServiceTypeProperties. 
+ */ + public static AzurePostgreSqlLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlLinkedServiceTypeProperties deserializedAzurePostgreSqlLinkedServiceTypeProperties + = new AzurePostgreSqlLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzurePostgreSqlLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java index ecc6e1515639..ce491141f5f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzurePostgreSqlTableDatasetTypeProperties.java @@ -5,30 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure PostgreSQL dataset properties. */ @Fluent -public final class AzurePostgreSqlTableDatasetTypeProperties { +public final class AzurePostgreSqlTableDatasetTypeProperties + implements JsonSerializable { /* * The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -110,4 +112,47 @@ public AzurePostgreSqlTableDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlTableDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzurePostgreSqlTableDatasetTypeProperties. 
+ */ + public static AzurePostgreSqlTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlTableDatasetTypeProperties deserializedAzurePostgreSqlTableDatasetTypeProperties + = new AzurePostgreSqlTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzurePostgreSqlTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAzurePostgreSqlTableDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedAzurePostgreSqlTableDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzurePostgreSqlTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java index 68dd8c122ea0..a15006fbd5c3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchIndexDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Properties 
specific to this dataset type. */ @Fluent -public final class AzureSearchIndexDatasetTypeProperties { +public final class AzureSearchIndexDatasetTypeProperties + implements JsonSerializable { /* * The name of the Azure Search Index. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "indexName", required = true) private Object indexName; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSearchIndexDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("indexName", this.indexName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSearchIndexDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSearchIndexDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSearchIndexDatasetTypeProperties. 
+ */ + public static AzureSearchIndexDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSearchIndexDatasetTypeProperties deserializedAzureSearchIndexDatasetTypeProperties + = new AzureSearchIndexDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("indexName".equals(fieldName)) { + deserializedAzureSearchIndexDatasetTypeProperties.indexName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSearchIndexDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java index 06eeed82e520..707e8cf23b72 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSearchLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Windows Azure Search Service linked service properties. 
*/ @Fluent -public final class AzureSearchLinkedServiceTypeProperties { +public final class AzureSearchLinkedServiceTypeProperties + implements JsonSerializable { /* * URL for Azure Search service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * Admin Key for Azure Search service */ - @JsonProperty(value = "key") private SecretBase key; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -118,4 +120,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSearchLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeJsonField("key", this.key); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSearchLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSearchLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSearchLinkedServiceTypeProperties. 
+ */ + public static AzureSearchLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSearchLinkedServiceTypeProperties deserializedAzureSearchLinkedServiceTypeProperties + = new AzureSearchLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedAzureSearchLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("key".equals(fieldName)) { + deserializedAzureSearchLinkedServiceTypeProperties.key = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureSearchLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSearchLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java index 02055ce0d942..433233dc0aad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWLinkedServiceTypeProperties.java @@ -5,12 +5,15 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; import 
com.azure.resourcemanager.datafactory.models.AzureSqlDWAuthenticationType; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure SQL Data Warehouse linked service properties. @@ -21,45 +24,38 @@ public final class AzureSqlDWLinkedServiceTypeProperties extends SqlServerBaseLi * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private AzureSqlDWAuthenticationType authenticationType; /* * The user name to be used when connecting to server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Azure SQL Data Warehouse. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -68,14 +64,12 @@ public final class AzureSqlDWLinkedServiceTypeProperties extends SqlServerBaseLi * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -83,20 +77,17 @@ public final class AzureSqlDWLinkedServiceTypeProperties extends SqlServerBaseLi * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -565,4 +556,137 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", server()); + jsonWriter.writeUntypedField("database", database()); + jsonWriter.writeUntypedField("encrypt", encrypt()); + jsonWriter.writeUntypedField("trustServerCertificate", trustServerCertificate()); + jsonWriter.writeUntypedField("hostNameInCertificate", hostnameInCertificate()); + jsonWriter.writeUntypedField("applicationIntent", applicationIntent()); + jsonWriter.writeUntypedField("connectTimeout", connectTimeout()); + jsonWriter.writeUntypedField("connectRetryCount", connectRetryCount()); + jsonWriter.writeUntypedField("connectRetryInterval", connectRetryInterval()); + jsonWriter.writeUntypedField("loadBalanceTimeout", loadBalanceTimeout()); + jsonWriter.writeUntypedField("commandTimeout", commandTimeout()); + jsonWriter.writeUntypedField("integratedSecurity", integratedSecurity()); + jsonWriter.writeUntypedField("failoverPartner", failoverPartner()); + jsonWriter.writeUntypedField("maxPoolSize", maxPoolSize()); + jsonWriter.writeUntypedField("minPoolSize", minPoolSize()); + jsonWriter.writeUntypedField("multipleActiveResultSets", multipleActiveResultSets()); + jsonWriter.writeUntypedField("multiSubnetFailover", multiSubnetFailover()); + jsonWriter.writeUntypedField("packetSize", packetSize()); + jsonWriter.writeUntypedField("pooling", pooling()); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDWLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDWLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlDWLinkedServiceTypeProperties. 
+ */ + public static AzureSqlDWLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDWLinkedServiceTypeProperties deserializedAzureSqlDWLinkedServiceTypeProperties + = new AzureSqlDWLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withServer(reader.readUntyped()); + } else if ("database".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withDatabase(reader.readUntyped()); + } else if ("encrypt".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withEncrypt(reader.readUntyped()); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withTrustServerCertificate(reader.readUntyped()); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withHostnameInCertificate(reader.readUntyped()); + } else if ("applicationIntent".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withApplicationIntent(reader.readUntyped()); + } else if ("connectTimeout".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withConnectTimeout(reader.readUntyped()); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withConnectRetryCount(reader.readUntyped()); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withConnectRetryInterval(reader.readUntyped()); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withLoadBalanceTimeout(reader.readUntyped()); + } else if ("commandTimeout".equals(fieldName)) { + 
deserializedAzureSqlDWLinkedServiceTypeProperties.withCommandTimeout(reader.readUntyped()); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withIntegratedSecurity(reader.readUntyped()); + } else if ("failoverPartner".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withFailoverPartner(reader.readUntyped()); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withMaxPoolSize(reader.readUntyped()); + } else if ("minPoolSize".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withMinPoolSize(reader.readUntyped()); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties + .withMultipleActiveResultSets(reader.readUntyped()); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withMultiSubnetFailover(reader.readUntyped()); + } else if ("packetSize".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withPacketSize(reader.readUntyped()); + } else if ("pooling".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.withPooling(reader.readUntyped()); + } else if ("connectionString".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.authenticationType + = AzureSqlDWAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + 
deserializedAzureSqlDWLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedAzureSqlDWLinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlDWLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWTableDatasetTypeProperties.java index 6cca4582f167..dcc72f21acf5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDWTableDatasetTypeProperties.java @@ -5,29 
+5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure SQL Data Warehouse dataset properties. */ @Fluent -public final class AzureSqlDWTableDatasetTypeProperties { +public final class AzureSqlDWTableDatasetTypeProperties + implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -109,4 +111,47 @@ public AzureSqlDWTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDWTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDWTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlDWTableDatasetTypeProperties. 
+ */ + public static AzureSqlDWTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDWTableDatasetTypeProperties deserializedAzureSqlDWTableDatasetTypeProperties + = new AzureSqlDWTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzureSqlDWTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlDWTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAzureSqlDWTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlDWTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java index ef03577d931a..72fc89b3a368 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlDatabaseLinkedServiceTypeProperties.java @@ -5,13 +5,16 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; import 
com.azure.resourcemanager.datafactory.models.AzureSqlDatabaseAuthenticationType; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedProperties; import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure SQL Database linked service properties. @@ -21,45 +24,38 @@ public final class AzureSqlDatabaseLinkedServiceTypeProperties extends SqlServer /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private AzureSqlDatabaseAuthenticationType authenticationType; /* * The user name to be used when connecting to server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Azure SQL Database. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -68,14 +64,12 @@ public final class AzureSqlDatabaseLinkedServiceTypeProperties extends SqlServer * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -83,26 +77,22 @@ public final class AzureSqlDatabaseLinkedServiceTypeProperties extends SqlServer * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Sql always encrypted properties. */ - @JsonProperty(value = "alwaysEncryptedSettings") private SqlAlwaysEncryptedProperties alwaysEncryptedSettings; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -596,4 +586,149 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", server()); + jsonWriter.writeUntypedField("database", database()); + jsonWriter.writeUntypedField("encrypt", encrypt()); + jsonWriter.writeUntypedField("trustServerCertificate", trustServerCertificate()); + jsonWriter.writeUntypedField("hostNameInCertificate", hostnameInCertificate()); + jsonWriter.writeUntypedField("applicationIntent", applicationIntent()); + jsonWriter.writeUntypedField("connectTimeout", connectTimeout()); + jsonWriter.writeUntypedField("connectRetryCount", connectRetryCount()); + jsonWriter.writeUntypedField("connectRetryInterval", connectRetryInterval()); + jsonWriter.writeUntypedField("loadBalanceTimeout", loadBalanceTimeout()); + jsonWriter.writeUntypedField("commandTimeout", commandTimeout()); + jsonWriter.writeUntypedField("integratedSecurity", integratedSecurity()); + jsonWriter.writeUntypedField("failoverPartner", failoverPartner()); + jsonWriter.writeUntypedField("maxPoolSize", maxPoolSize()); + jsonWriter.writeUntypedField("minPoolSize", minPoolSize()); + jsonWriter.writeUntypedField("multipleActiveResultSets", multipleActiveResultSets()); + jsonWriter.writeUntypedField("multiSubnetFailover", multiSubnetFailover()); + jsonWriter.writeUntypedField("packetSize", packetSize()); + jsonWriter.writeUntypedField("pooling", pooling()); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("alwaysEncryptedSettings", this.alwaysEncryptedSettings); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDatabaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDatabaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlDatabaseLinkedServiceTypeProperties. 
+ */ + public static AzureSqlDatabaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDatabaseLinkedServiceTypeProperties deserializedAzureSqlDatabaseLinkedServiceTypeProperties + = new AzureSqlDatabaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withServer(reader.readUntyped()); + } else if ("database".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withDatabase(reader.readUntyped()); + } else if ("encrypt".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withEncrypt(reader.readUntyped()); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withTrustServerCertificate(reader.readUntyped()); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withHostnameInCertificate(reader.readUntyped()); + } else if ("applicationIntent".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withApplicationIntent(reader.readUntyped()); + } else if ("connectTimeout".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withConnectTimeout(reader.readUntyped()); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withConnectRetryCount(reader.readUntyped()); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withConnectRetryInterval(reader.readUntyped()); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withLoadBalanceTimeout(reader.readUntyped()); + } else if 
("commandTimeout".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withCommandTimeout(reader.readUntyped()); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withIntegratedSecurity(reader.readUntyped()); + } else if ("failoverPartner".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withFailoverPartner(reader.readUntyped()); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withMaxPoolSize(reader.readUntyped()); + } else if ("minPoolSize".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withMinPoolSize(reader.readUntyped()); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withMultipleActiveResultSets(reader.readUntyped()); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties + .withMultiSubnetFailover(reader.readUntyped()); + } else if ("packetSize".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withPacketSize(reader.readUntyped()); + } else if ("pooling".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.withPooling(reader.readUntyped()); + } else if ("connectionString".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.authenticationType + = AzureSqlDatabaseAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.password + = 
AzureKeyVaultSecretReference.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("alwaysEncryptedSettings".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.alwaysEncryptedSettings + = SqlAlwaysEncryptedProperties.fromJson(reader); + } else if ("credential".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlDatabaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java index 
16ee5b5edda5..6b4a1c2148ed 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMILinkedServiceTypeProperties.java @@ -5,13 +5,16 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; import com.azure.resourcemanager.datafactory.models.AzureSqlMIAuthenticationType; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedProperties; import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure SQL Managed Instance linked service properties. @@ -21,45 +24,38 @@ public final class AzureSqlMILinkedServiceTypeProperties extends SqlServerBaseLi /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private AzureSqlMIAuthenticationType authenticationType; /* * The user name to be used when connecting to server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The Azure key vault secret reference of password in connection string. 
*/ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Azure SQL Managed Instance. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -68,14 +64,12 @@ public final class AzureSqlMILinkedServiceTypeProperties extends SqlServerBaseLi * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -83,26 +77,22 @@ public final class AzureSqlMILinkedServiceTypeProperties extends SqlServerBaseLi * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Sql always encrypted properties. 
*/ - @JsonProperty(value = "alwaysEncryptedSettings") private SqlAlwaysEncryptedProperties alwaysEncryptedSettings; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -595,4 +585,141 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", server()); + jsonWriter.writeUntypedField("database", database()); + jsonWriter.writeUntypedField("encrypt", encrypt()); + jsonWriter.writeUntypedField("trustServerCertificate", trustServerCertificate()); + jsonWriter.writeUntypedField("hostNameInCertificate", hostnameInCertificate()); + jsonWriter.writeUntypedField("applicationIntent", applicationIntent()); + jsonWriter.writeUntypedField("connectTimeout", connectTimeout()); + jsonWriter.writeUntypedField("connectRetryCount", connectRetryCount()); + jsonWriter.writeUntypedField("connectRetryInterval", connectRetryInterval()); + jsonWriter.writeUntypedField("loadBalanceTimeout", loadBalanceTimeout()); + jsonWriter.writeUntypedField("commandTimeout", commandTimeout()); + jsonWriter.writeUntypedField("integratedSecurity", integratedSecurity()); + jsonWriter.writeUntypedField("failoverPartner", failoverPartner()); + jsonWriter.writeUntypedField("maxPoolSize", maxPoolSize()); + jsonWriter.writeUntypedField("minPoolSize", minPoolSize()); + jsonWriter.writeUntypedField("multipleActiveResultSets", multipleActiveResultSets()); + jsonWriter.writeUntypedField("multiSubnetFailover", multiSubnetFailover()); + jsonWriter.writeUntypedField("packetSize", packetSize()); + jsonWriter.writeUntypedField("pooling", pooling()); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("alwaysEncryptedSettings", this.alwaysEncryptedSettings); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlMILinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlMILinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlMILinkedServiceTypeProperties. 
+ */ + public static AzureSqlMILinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlMILinkedServiceTypeProperties deserializedAzureSqlMILinkedServiceTypeProperties + = new AzureSqlMILinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withServer(reader.readUntyped()); + } else if ("database".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withDatabase(reader.readUntyped()); + } else if ("encrypt".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withEncrypt(reader.readUntyped()); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withTrustServerCertificate(reader.readUntyped()); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withHostnameInCertificate(reader.readUntyped()); + } else if ("applicationIntent".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withApplicationIntent(reader.readUntyped()); + } else if ("connectTimeout".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withConnectTimeout(reader.readUntyped()); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withConnectRetryCount(reader.readUntyped()); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withConnectRetryInterval(reader.readUntyped()); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withLoadBalanceTimeout(reader.readUntyped()); + } else if ("commandTimeout".equals(fieldName)) { + 
deserializedAzureSqlMILinkedServiceTypeProperties.withCommandTimeout(reader.readUntyped()); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withIntegratedSecurity(reader.readUntyped()); + } else if ("failoverPartner".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withFailoverPartner(reader.readUntyped()); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withMaxPoolSize(reader.readUntyped()); + } else if ("minPoolSize".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withMinPoolSize(reader.readUntyped()); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties + .withMultipleActiveResultSets(reader.readUntyped()); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withMultiSubnetFailover(reader.readUntyped()); + } else if ("packetSize".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withPacketSize(reader.readUntyped()); + } else if ("pooling".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.withPooling(reader.readUntyped()); + } else if ("connectionString".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.authenticationType + = AzureSqlMIAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + 
deserializedAzureSqlMILinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("alwaysEncryptedSettings".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.alwaysEncryptedSettings + = SqlAlwaysEncryptedProperties.fromJson(reader); + } else if ("credential".equals(fieldName)) { + deserializedAzureSqlMILinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlMILinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMITableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMITableDatasetTypeProperties.java index 382d63ff265f..3eaf078245b8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMITableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlMITableDatasetTypeProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure SQL Managed Instance dataset properties. */ @Fluent -public final class AzureSqlMITableDatasetTypeProperties { +public final class AzureSqlMITableDatasetTypeProperties + implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -109,4 +111,47 @@ public AzureSqlMITableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlMITableDatasetTypeProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlMITableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlMITableDatasetTypeProperties. + */ + public static AzureSqlMITableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlMITableDatasetTypeProperties deserializedAzureSqlMITableDatasetTypeProperties + = new AzureSqlMITableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzureSqlMITableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlMITableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAzureSqlMITableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlMITableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlTableDatasetTypeProperties.java index 31c1bab15bed..1c76c5529e48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSqlTableDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import 
com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure SQL dataset properties. */ @Fluent -public final class AzureSqlTableDatasetTypeProperties { +public final class AzureSqlTableDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The schema name of the Azure SQL database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the Azure SQL database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -109,4 +110,47 @@ public AzureSqlTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlTableDatasetTypeProperties. 
+ */ + public static AzureSqlTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlTableDatasetTypeProperties deserializedAzureSqlTableDatasetTypeProperties + = new AzureSqlTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzureSqlTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedAzureSqlTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSqlTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java index 1265cf907238..8bcb76ab2f1d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureStorageLinkedServiceTypeProperties.java @@ -5,45 +5,45 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import java.io.IOException; /** * Azure Storage linked service properties. */ @Fluent -public final class AzureStorageLinkedServiceTypeProperties { +public class AzureStorageLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of accountKey in connection string. */ - @JsonProperty(value = "accountKey") private AzureKeyVaultSecretReference accountKey; /* * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, * SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "sasUri") private Object sasUri; /* * The Azure key vault secret reference of sasToken in sas uri. */ - @JsonProperty(value = "sasToken") private AzureKeyVaultSecretReference sasToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -171,4 +171,55 @@ public void validate() { sasToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("accountKey", this.accountKey); + jsonWriter.writeUntypedField("sasUri", this.sasUri); + jsonWriter.writeJsonField("sasToken", this.sasToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AzureStorageLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureStorageLinkedServiceTypeProperties. + */ + public static AzureStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureStorageLinkedServiceTypeProperties deserializedAzureStorageLinkedServiceTypeProperties + = new AzureStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzureStorageLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("accountKey".equals(fieldName)) { + deserializedAzureStorageLinkedServiceTypeProperties.accountKey + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("sasUri".equals(fieldName)) { + deserializedAzureStorageLinkedServiceTypeProperties.sasUri = reader.readUntyped(); + } else if ("sasToken".equals(fieldName)) { + deserializedAzureStorageLinkedServiceTypeProperties.sasToken + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureStorageLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureStorageLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java index 340f5b58cbb5..990cee734206 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Synapse Analytics (Artifacts) linked service properties. */ @Fluent -public final class AzureSynapseArtifactsLinkedServiceTypeProperties { +public final class AzureSynapseArtifactsLinkedServiceTypeProperties + implements JsonSerializable { /* * https://.dev.azuresynapse.net, Azure Synapse Analytics workspace URL. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * Required to specify MSI, if using system assigned managed identity as authentication method. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "authentication") private Object authentication; /* @@ -32,7 +35,6 @@ public final class AzureSynapseArtifactsLinkedServiceTypeProperties { * /subscriptions/{subscriptionID}/resourceGroups/{resourceGroup}/providers/Microsoft.Synapse/workspaces/{ * workspaceName}. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "workspaceResourceId") private Object workspaceResourceId; /** @@ -123,4 +125,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSynapseArtifactsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("authentication", this.authentication); + jsonWriter.writeUntypedField("workspaceResourceId", this.workspaceResourceId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSynapseArtifactsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSynapseArtifactsLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSynapseArtifactsLinkedServiceTypeProperties. 
+ */ + public static AzureSynapseArtifactsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSynapseArtifactsLinkedServiceTypeProperties deserializedAzureSynapseArtifactsLinkedServiceTypeProperties + = new AzureSynapseArtifactsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("authentication".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedServiceTypeProperties.authentication = reader.readUntyped(); + } else if ("workspaceResourceId".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedServiceTypeProperties.workspaceResourceId + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSynapseArtifactsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java index 0a8c11f4be1a..c8376ad24546 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableDatasetTypeProperties.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; 
+import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Table dataset properties. */ @Fluent -public final class AzureTableDatasetTypeProperties { +public final class AzureTableDatasetTypeProperties implements JsonSerializable { /* * The table name of the Azure Table storage. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName", required = true) private Object tableName; /** @@ -61,4 +64,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureTableDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureTableDatasetTypeProperties. 
+ */ + public static AzureTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableDatasetTypeProperties deserializedAzureTableDatasetTypeProperties + = new AzureTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedAzureTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableStorageLinkedServiceTypeProperties.java new file mode 100644 index 000000000000..78c8768b0aff --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureTableStorageLinkedServiceTypeProperties.java @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; +import com.azure.resourcemanager.datafactory.models.CredentialReference; +import java.io.IOException; + +/** + * Azure Table Storage linked service properties. 
+ */ +@Fluent +public final class AzureTableStorageLinkedServiceTypeProperties extends AzureStorageLinkedServiceTypeProperties { + /* + * Table service endpoint of the Azure Table Storage resource. It is mutually exclusive with connectionString, + * sasUri property. + */ + private Object serviceEndpoint; + + /* + * The credential reference containing authentication information. + */ + private CredentialReference credential; + + /** + * Creates an instance of AzureTableStorageLinkedServiceTypeProperties class. + */ + public AzureTableStorageLinkedServiceTypeProperties() { + } + + /** + * Get the serviceEndpoint property: Table service endpoint of the Azure Table Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @return the serviceEndpoint value. + */ + public Object serviceEndpoint() { + return this.serviceEndpoint; + } + + /** + * Set the serviceEndpoint property: Table service endpoint of the Azure Table Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @param serviceEndpoint the serviceEndpoint value to set. + * @return the AzureTableStorageLinkedServiceTypeProperties object itself. + */ + public AzureTableStorageLinkedServiceTypeProperties withServiceEndpoint(Object serviceEndpoint) { + this.serviceEndpoint = serviceEndpoint; + return this; + } + + /** + * Get the credential property: The credential reference containing authentication information. + * + * @return the credential value. + */ + public CredentialReference credential() { + return this.credential; + } + + /** + * Set the credential property: The credential reference containing authentication information. + * + * @param credential the credential value to set. + * @return the AzureTableStorageLinkedServiceTypeProperties object itself. 
+ */ + public AzureTableStorageLinkedServiceTypeProperties withCredential(CredentialReference credential) { + this.credential = credential; + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public AzureTableStorageLinkedServiceTypeProperties withConnectionString(Object connectionString) { + super.withConnectionString(connectionString); + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public AzureTableStorageLinkedServiceTypeProperties withAccountKey(AzureKeyVaultSecretReference accountKey) { + super.withAccountKey(accountKey); + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public AzureTableStorageLinkedServiceTypeProperties withSasUri(Object sasUri) { + super.withSasUri(sasUri); + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public AzureTableStorageLinkedServiceTypeProperties withSasToken(AzureKeyVaultSecretReference sasToken) { + super.withSasToken(sasToken); + return this; + } + + /** + * {@inheritDoc} + */ + @Override + public AzureTableStorageLinkedServiceTypeProperties withEncryptedCredential(String encryptedCredential) { + super.withEncryptedCredential(encryptedCredential); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + @Override + public void validate() { + super.validate(); + if (credential() != null) { + credential().validate(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", connectionString()); + jsonWriter.writeJsonField("accountKey", accountKey()); + jsonWriter.writeUntypedField("sasUri", sasUri()); + jsonWriter.writeJsonField("sasToken", sasToken()); + jsonWriter.writeStringField("encryptedCredential", encryptedCredential()); + jsonWriter.writeUntypedField("serviceEndpoint", this.serviceEndpoint); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableStorageLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureTableStorageLinkedServiceTypeProperties. 
+ */ + public static AzureTableStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableStorageLinkedServiceTypeProperties deserializedAzureTableStorageLinkedServiceTypeProperties + = new AzureTableStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties.withConnectionString(reader.readUntyped()); + } else if ("accountKey".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties + .withAccountKey(AzureKeyVaultSecretReference.fromJson(reader)); + } else if ("sasUri".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties.withSasUri(reader.readUntyped()); + } else if ("sasToken".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties + .withSasToken(AzureKeyVaultSecretReference.fromJson(reader)); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties + .withEncryptedCredential(reader.getString()); + } else if ("serviceEndpoint".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties.serviceEndpoint = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedAzureTableStorageLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureTableStorageLinkedServiceTypeProperties; + }); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java index 
93216164bc52..87a2626742b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BinaryDatasetTypeProperties.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Binary dataset properties. */ @Fluent -public final class BinaryDatasetTypeProperties { +public final class BinaryDatasetTypeProperties implements JsonSerializable { /* * The location of the Binary storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The data compression method used for the binary dataset. */ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -92,4 +94,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BinaryDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BinaryDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of BinaryDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BinaryDatasetTypeProperties. + */ + public static BinaryDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BinaryDatasetTypeProperties deserializedBinaryDatasetTypeProperties = new BinaryDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedBinaryDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedBinaryDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedBinaryDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java index 71dc199d9d82..015f52e5a5f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobEventsTriggerTypeProperties.java @@ -6,21 +6,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.BlobEventTypes; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Blob Events Trigger properties. */ @Fluent -public final class BlobEventsTriggerTypeProperties { +public final class BlobEventsTriggerTypeProperties implements JsonSerializable { /* * The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' * will only fire the trigger for blobs in the december folder under the records container. At least one of these * must be provided: blobPathBeginsWith, blobPathEndsWith. */ - @JsonProperty(value = "blobPathBeginsWith") private String blobPathBeginsWith; /* @@ -28,25 +31,21 @@ public final class BlobEventsTriggerTypeProperties { * fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: * blobPathBeginsWith, blobPathEndsWith. */ - @JsonProperty(value = "blobPathEndsWith") private String blobPathEndsWith; /* * If set to true, blobs with zero bytes will be ignored. */ - @JsonProperty(value = "ignoreEmptyBlobs") private Boolean ignoreEmptyBlobs; /* * The type of events that cause this trigger to fire. */ - @JsonProperty(value = "events", required = true) private List events; /* * The ARM resource ID of the Storage Account. */ - @JsonProperty(value = "scope", required = true) private String scope; /** @@ -182,4 +181,58 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BlobEventsTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("events", this.events, + (writer, element) -> writer.writeString(element == null ? 
null : element.toString())); + jsonWriter.writeStringField("scope", this.scope); + jsonWriter.writeStringField("blobPathBeginsWith", this.blobPathBeginsWith); + jsonWriter.writeStringField("blobPathEndsWith", this.blobPathEndsWith); + jsonWriter.writeBooleanField("ignoreEmptyBlobs", this.ignoreEmptyBlobs); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobEventsTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobEventsTriggerTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BlobEventsTriggerTypeProperties. + */ + public static BlobEventsTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobEventsTriggerTypeProperties deserializedBlobEventsTriggerTypeProperties + = new BlobEventsTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("events".equals(fieldName)) { + List events + = reader.readArray(reader1 -> BlobEventTypes.fromString(reader1.getString())); + deserializedBlobEventsTriggerTypeProperties.events = events; + } else if ("scope".equals(fieldName)) { + deserializedBlobEventsTriggerTypeProperties.scope = reader.getString(); + } else if ("blobPathBeginsWith".equals(fieldName)) { + deserializedBlobEventsTriggerTypeProperties.blobPathBeginsWith = reader.getString(); + } else if ("blobPathEndsWith".equals(fieldName)) { + deserializedBlobEventsTriggerTypeProperties.blobPathEndsWith = reader.getString(); + } else if ("ignoreEmptyBlobs".equals(fieldName)) { + deserializedBlobEventsTriggerTypeProperties.ignoreEmptyBlobs + = reader.getNullable(JsonReader::getBoolean); + } else 
{ + reader.skipChildren(); + } + } + + return deserializedBlobEventsTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java index 7d20ea0e4341..25c881463552 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/BlobTriggerTypeProperties.java @@ -6,30 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Blob Trigger properties. */ @Fluent -public final class BlobTriggerTypeProperties { +public final class BlobTriggerTypeProperties implements JsonSerializable { /* * The path of the container/folder that will trigger the pipeline. */ - @JsonProperty(value = "folderPath", required = true) private String folderPath; /* * The max number of parallel files to handle when it is triggered. */ - @JsonProperty(value = "maxConcurrency", required = true) private int maxConcurrency; /* * The Azure Storage linked service reference. 
*/ - @JsonProperty(value = "linkedService", required = true) private LinkedServiceReference linkedService; /** @@ -119,4 +120,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BlobTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("folderPath", this.folderPath); + jsonWriter.writeIntField("maxConcurrency", this.maxConcurrency); + jsonWriter.writeJsonField("linkedService", this.linkedService); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobTriggerTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BlobTriggerTypeProperties. 
+ */ + public static BlobTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobTriggerTypeProperties deserializedBlobTriggerTypeProperties = new BlobTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedBlobTriggerTypeProperties.folderPath = reader.getString(); + } else if ("maxConcurrency".equals(fieldName)) { + deserializedBlobTriggerTypeProperties.maxConcurrency = reader.getInt(); + } else if ("linkedService".equals(fieldName)) { + deserializedBlobTriggerTypeProperties.linkedService = LinkedServiceReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedBlobTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java index 0415da90e4d3..cbaecaaf1196 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraLinkedServiceTypeProperties.java @@ -6,49 +6,48 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Cassandra linked service 
properties. */ @Fluent -public final class CassandraLinkedServiceTypeProperties { +public final class CassandraLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "host", required = true) private Object host; /* * AuthenticationType to be used for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The port for the connection. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "port") private Object port; /* * Username for authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -198,4 +197,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CassandraLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CassandraLinkedServiceTypeProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of CassandraLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CassandraLinkedServiceTypeProperties. + */ + public static CassandraLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CassandraLinkedServiceTypeProperties deserializedCassandraLinkedServiceTypeProperties + = new CassandraLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedCassandraLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCassandraLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java index 05b3de503311..3b20e32bc683 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CassandraTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Cassandra dataset properties. */ @Fluent -public final class CassandraTableDatasetTypeProperties { +public final class CassandraTableDatasetTypeProperties + implements JsonSerializable { /* * The table name of the Cassandra database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /* * The keyspace of the Cassandra database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "keyspace") private Object keyspace; /** @@ -81,4 +84,44 @@ public CassandraTableDatasetTypeProperties withKeyspace(Object keyspace) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("keyspace", this.keyspace); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CassandraTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of CassandraTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CassandraTableDatasetTypeProperties. + */ + public static CassandraTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CassandraTableDatasetTypeProperties deserializedCassandraTableDatasetTypeProperties + = new CassandraTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedCassandraTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("keyspace".equals(fieldName)) { + deserializedCassandraTableDatasetTypeProperties.keyspace = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCassandraTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java index 6d951463be73..2425cb3ed632 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChainingTriggerTypeProperties.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Chaining Trigger properties. */ @Fluent -public final class ChainingTriggerTypeProperties { +public final class ChainingTriggerTypeProperties implements JsonSerializable { /* * Upstream Pipelines. */ - @JsonProperty(value = "dependsOn", required = true) private List dependsOn; /* * Run Dimension property that needs to be emitted by upstream pipelines. */ - @JsonProperty(value = "runDimension", required = true) private String runDimension; /** @@ -94,4 +96,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ChainingTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("dependsOn", this.dependsOn, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("runDimension", this.runDimension); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChainingTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChainingTriggerTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ChainingTriggerTypeProperties. 
+ */ + public static ChainingTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChainingTriggerTypeProperties deserializedChainingTriggerTypeProperties + = new ChainingTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> PipelineReference.fromJson(reader1)); + deserializedChainingTriggerTypeProperties.dependsOn = dependsOn; + } else if ("runDimension".equals(fieldName)) { + deserializedChainingTriggerTypeProperties.runDimension = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedChainingTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java index edc86279eb79..00c2b32ba03d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCapture.java @@ -6,11 +6,15 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; import com.azure.resourcemanager.datafactory.models.MapperPolicy; import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** @@ -18,47 +22,40 @@ * the destination. */ @Fluent -public final class ChangeDataCapture { +public final class ChangeDataCapture implements JsonSerializable { /* * The folder that this CDC is in. If not specified, CDC will appear at the root level. */ - @JsonProperty(value = "folder") private ChangeDataCaptureFolder folder; /* * The description of the change data capture. */ - @JsonProperty(value = "description") private String description; /* * List of sources connections that can be used as sources in the CDC. */ - @JsonProperty(value = "sourceConnectionsInfo", required = true) private List sourceConnectionsInfo; /* * List of target connections that can be used as sources in the CDC. */ - @JsonProperty(value = "targetConnectionsInfo", required = true) private List targetConnectionsInfo; /* * CDC policy */ - @JsonProperty(value = "policy", required = true) private MapperPolicy policy; /* * A boolean to determine if the vnet configuration needs to be overwritten. */ - @JsonProperty(value = "allowVNetOverride") private Boolean allowVNetOverride; /* * Status of the CDC as to if it is running or stopped. 
*/ - @JsonProperty(value = "status") private String status; /** @@ -239,4 +236,65 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ChangeDataCapture.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sourceConnectionsInfo", this.sourceConnectionsInfo, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("targetConnectionsInfo", this.targetConnectionsInfo, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("policy", this.policy); + jsonWriter.writeJsonField("folder", this.folder); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeBooleanField("allowVNetOverride", this.allowVNetOverride); + jsonWriter.writeStringField("status", this.status); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChangeDataCapture from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChangeDataCapture if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ChangeDataCapture. 
+ */ + public static ChangeDataCapture fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChangeDataCapture deserializedChangeDataCapture = new ChangeDataCapture(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceConnectionsInfo".equals(fieldName)) { + List sourceConnectionsInfo + = reader.readArray(reader1 -> MapperSourceConnectionsInfo.fromJson(reader1)); + deserializedChangeDataCapture.sourceConnectionsInfo = sourceConnectionsInfo; + } else if ("targetConnectionsInfo".equals(fieldName)) { + List targetConnectionsInfo + = reader.readArray(reader1 -> MapperTargetConnectionsInfo.fromJson(reader1)); + deserializedChangeDataCapture.targetConnectionsInfo = targetConnectionsInfo; + } else if ("policy".equals(fieldName)) { + deserializedChangeDataCapture.policy = MapperPolicy.fromJson(reader); + } else if ("folder".equals(fieldName)) { + deserializedChangeDataCapture.folder = ChangeDataCaptureFolder.fromJson(reader); + } else if ("description".equals(fieldName)) { + deserializedChangeDataCapture.description = reader.getString(); + } else if ("allowVNetOverride".equals(fieldName)) { + deserializedChangeDataCapture.allowVNetOverride = reader.getNullable(JsonReader::getBoolean); + } else if ("status".equals(fieldName)) { + deserializedChangeDataCapture.status = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedChangeDataCapture; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java index c05fefc8974a..57b817d9e968 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ChangeDataCaptureResourceInner.java @@ -7,14 +7,14 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; import com.azure.resourcemanager.datafactory.models.MapperPolicy; import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -27,31 +27,26 @@ public final class ChangeDataCaptureResourceInner extends SubResource { /* * Properties of the change data capture. */ - @JsonProperty(value = "properties", required = true) private ChangeDataCapture innerProperties = new ChangeDataCapture(); /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /* * Change data capture resource type. 
*/ - @JsonIgnore private Map additionalProperties; /** @@ -101,7 +96,6 @@ public String etag() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -117,14 +111,6 @@ public ChangeDataCaptureResourceInner withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * {@inheritDoc} */ @@ -313,4 +299,62 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ChangeDataCaptureResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.innerProperties); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChangeDataCaptureResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChangeDataCaptureResourceInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ChangeDataCaptureResourceInner. 
+ */ + public static ChangeDataCaptureResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChangeDataCaptureResourceInner deserializedChangeDataCaptureResourceInner + = new ChangeDataCaptureResourceInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedChangeDataCaptureResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedChangeDataCaptureResourceInner.innerProperties = ChangeDataCapture.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedChangeDataCaptureResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedChangeDataCaptureResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedChangeDataCaptureResourceInner.etag = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedChangeDataCaptureResourceInner.additionalProperties = additionalProperties; + + return deserializedChangeDataCaptureResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java index 9995266145c9..b0a895269c2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CmdkeySetupTypeProperties.java 
@@ -6,30 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Cmdkey command custom setup type properties. */ @Fluent -public final class CmdkeySetupTypeProperties { +public final class CmdkeySetupTypeProperties implements JsonSerializable { /* * The server name of data source access. Type: string. */ - @JsonProperty(value = "targetName", required = true) private Object targetName; /* * The user name of data source access. Type: string. */ - @JsonProperty(value = "userName", required = true) private Object username; /* * The password of data source access. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /** @@ -124,4 +125,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CmdkeySetupTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("targetName", this.targetName); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CmdkeySetupTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CmdkeySetupTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CmdkeySetupTypeProperties. 
+ */ + public static CmdkeySetupTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CmdkeySetupTypeProperties deserializedCmdkeySetupTypeProperties = new CmdkeySetupTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("targetName".equals(fieldName)) { + deserializedCmdkeySetupTypeProperties.targetName = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedCmdkeySetupTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedCmdkeySetupTypeProperties.password = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedCmdkeySetupTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java index b03b39d08085..63aeb5158ff0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsEntityDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Common Data Service for Apps entity dataset 
properties. */ @Fluent -public final class CommonDataServiceForAppsEntityDatasetTypeProperties { +public final class CommonDataServiceForAppsEntityDatasetTypeProperties + implements JsonSerializable { /* * The logical name of the entity. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "entityName") private Object entityName; /** @@ -51,4 +55,42 @@ public CommonDataServiceForAppsEntityDatasetTypeProperties withEntityName(Object */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("entityName", this.entityName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsEntityDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CommonDataServiceForAppsEntityDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsEntityDatasetTypeProperties. 
+ */ + public static CommonDataServiceForAppsEntityDatasetTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsEntityDatasetTypeProperties deserializedCommonDataServiceForAppsEntityDatasetTypeProperties + = new CommonDataServiceForAppsEntityDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("entityName".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDatasetTypeProperties.entityName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCommonDataServiceForAppsEntityDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java index e7bb1f08ae1e..a233d3669dd1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CommonDataServiceForAppsLinkedServiceTypeProperties.java @@ -6,41 +6,42 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Common Data Service for Apps linked service properties. 
*/ @Fluent -public final class CommonDataServiceForAppsLinkedServiceTypeProperties { +public final class CommonDataServiceForAppsLinkedServiceTypeProperties + implements JsonSerializable { /* * The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps * Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "deploymentType", required = true) private Object deploymentType; /* * The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and * not allowed for online. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "hostName") private Object hostname; /* * The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed * for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not * allowed for on-prem. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "serviceUri") private Object serviceUri; /* @@ -48,42 +49,42 @@ public final class CommonDataServiceForAppsLinkedServiceTypeProperties { * required for online when there are more than one Common Data Service for Apps instances associated with the user. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "organizationName") private Object organizationName; /* * The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' - * for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. - * Type: string (or Expression with resultType string). 
+ * for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario, + * 'Active Directory' for Dynamics on-premises with IFD. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType", required = true) private Object authenticationType; + /* + * The Active Directory domain that will verify user credentials. Type: string (or Expression with resultType + * string). + */ + private Object domain; + /* * User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "username") private Object username; /* * Password to access the Common Data Service for Apps instance. */ - @JsonProperty(value = "password") private SecretBase password; /* * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -92,14 +93,12 @@ public final class CommonDataServiceForAppsLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -227,7 +226,8 @@ public CommonDataServiceForAppsLinkedServiceTypeProperties withOrganizationName( /** * Get the authenticationType property: The authentication type to connect to Common Data Service for Apps server. * 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for - * Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + * Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: + * string (or Expression with resultType string). * * @return the authenticationType value. */ @@ -238,7 +238,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Common Data Service for Apps server. * 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for - * Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + * Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: + * string (or Expression with resultType string). * * @param authenticationType the authenticationType value to set. * @return the CommonDataServiceForAppsLinkedServiceTypeProperties object itself. @@ -248,6 +249,28 @@ public CommonDataServiceForAppsLinkedServiceTypeProperties withAuthenticationTyp return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @return the domain value. + */ + public Object domain() { + return this.domain; + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). 
+ * + * @param domain the domain value to set. + * @return the CommonDataServiceForAppsLinkedServiceTypeProperties object itself. + */ + public CommonDataServiceForAppsLinkedServiceTypeProperties withDomain(Object domain) { + this.domain = domain; + return this; + } + /** * Get the username property: User name to access the Common Data Service for Apps instance. Type: string (or * Expression with resultType string). @@ -412,4 +435,87 @@ public void validate() { private static final ClientLogger LOGGER = new ClientLogger(CommonDataServiceForAppsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("deploymentType", this.deploymentType); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("hostName", this.hostname); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("serviceUri", this.serviceUri); + jsonWriter.writeUntypedField("organizationName", this.organizationName); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of CommonDataServiceForAppsLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsLinkedServiceTypeProperties. + */ + public static CommonDataServiceForAppsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsLinkedServiceTypeProperties deserializedCommonDataServiceForAppsLinkedServiceTypeProperties + = new CommonDataServiceForAppsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("deploymentType".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.deploymentType + = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.authenticationType + = reader.readUntyped(); + } else if ("hostName".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.hostname = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("serviceUri".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.serviceUri = reader.readUntyped(); + } else if ("organizationName".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.organizationName + = reader.readUntyped(); + } else if ("domain".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.domain = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + 
deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.password + = SecretBase.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.servicePrincipalId + = reader.readUntyped(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCommonDataServiceForAppsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java index 5b640d71f396..ee21c7336c2e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ConcurLinkedServiceTypeProperties.java @@ -6,63 +6,59 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; 
+import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Concur Service linked service properties. */ @Fluent -public final class ConcurLinkedServiceTypeProperties { +public final class ConcurLinkedServiceTypeProperties implements JsonSerializable { /* * Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. * Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * Application client_id supplied by Concur App Management. */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * The user name that you use to access Concur Service. */ - @JsonProperty(value = "username", required = true) private Object username; /* * The password corresponding to the user name that you provided in the username field. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -263,4 +259,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ConcurLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConcurLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConcurLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ConcurLinkedServiceTypeProperties. 
+ */ + public static ConcurLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConcurLinkedServiceTypeProperties deserializedConcurLinkedServiceTypeProperties + = new ConcurLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("clientId".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("connectionProperties".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedConcurLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedConcurLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java index 6b49441185b7..5a3b9e64e3b7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CopyActivityTypeProperties.java @@ -6,6 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CopySink; import com.azure.resourcemanager.datafactory.models.CopySource; import com.azure.resourcemanager.datafactory.models.LogSettings; @@ -13,105 +17,90 @@ import com.azure.resourcemanager.datafactory.models.RedirectIncompatibleRowSettings; import com.azure.resourcemanager.datafactory.models.SkipErrorFile; import com.azure.resourcemanager.datafactory.models.StagingSettings; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Copy activity properties. */ @Fluent -public final class CopyActivityTypeProperties { +public final class CopyActivityTypeProperties implements JsonSerializable { /* * Copy activity source. */ - @JsonProperty(value = "source", required = true) private CopySource source; /* * Copy activity sink. */ - @JsonProperty(value = "sink", required = true) private CopySink sink; /* * Copy activity translator. If not specified, tabular translator is used. */ - @JsonProperty(value = "translator") private Object translator; /* * Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "enableStaging") private Object enableStaging; /* * Specifies interim staging settings when EnableStaging is true. 
*/ - @JsonProperty(value = "stagingSettings") private StagingSettings stagingSettings; /* * Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: * integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "parallelCopies") private Object parallelCopies; /* * Maximum number of data integration units that can be used to perform this data movement. Type: integer (or * Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "dataIntegrationUnits") private Object dataIntegrationUnits; /* * Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableSkipIncompatibleRow") private Object enableSkipIncompatibleRow; /* * Redirect incompatible row settings when EnableSkipIncompatibleRow is true. */ - @JsonProperty(value = "redirectIncompatibleRowSettings") private RedirectIncompatibleRowSettings redirectIncompatibleRowSettings; /* * (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. */ - @JsonProperty(value = "logStorageSettings") private LogStorageSettings logStorageSettings; /* * Log settings customer needs provide when enabling log. */ - @JsonProperty(value = "logSettings") private LogSettings logSettings; /* * Preserve Rules. */ - @JsonProperty(value = "preserveRules") private List preserveRules; /* * Preserve rules. */ - @JsonProperty(value = "preserve") private List preserve; /* * Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "validateDataConsistency") private Object validateDataConsistency; /* * Specify the fault tolerance for data consistency. 
*/ - @JsonProperty(value = "skipErrorFile") private SkipErrorFile skipErrorFile; /** @@ -473,4 +462,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CopyActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("source", this.source); + jsonWriter.writeJsonField("sink", this.sink); + jsonWriter.writeUntypedField("translator", this.translator); + jsonWriter.writeUntypedField("enableStaging", this.enableStaging); + jsonWriter.writeJsonField("stagingSettings", this.stagingSettings); + jsonWriter.writeUntypedField("parallelCopies", this.parallelCopies); + jsonWriter.writeUntypedField("dataIntegrationUnits", this.dataIntegrationUnits); + jsonWriter.writeUntypedField("enableSkipIncompatibleRow", this.enableSkipIncompatibleRow); + jsonWriter.writeJsonField("redirectIncompatibleRowSettings", this.redirectIncompatibleRowSettings); + jsonWriter.writeJsonField("logStorageSettings", this.logStorageSettings); + jsonWriter.writeJsonField("logSettings", this.logSettings); + jsonWriter.writeArrayField("preserveRules", this.preserveRules, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("preserve", this.preserve, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("validateDataConsistency", this.validateDataConsistency); + jsonWriter.writeJsonField("skipErrorFile", this.skipErrorFile); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopyActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopyActivityTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the CopyActivityTypeProperties. + */ + public static CopyActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopyActivityTypeProperties deserializedCopyActivityTypeProperties = new CopyActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("source".equals(fieldName)) { + deserializedCopyActivityTypeProperties.source = CopySource.fromJson(reader); + } else if ("sink".equals(fieldName)) { + deserializedCopyActivityTypeProperties.sink = CopySink.fromJson(reader); + } else if ("translator".equals(fieldName)) { + deserializedCopyActivityTypeProperties.translator = reader.readUntyped(); + } else if ("enableStaging".equals(fieldName)) { + deserializedCopyActivityTypeProperties.enableStaging = reader.readUntyped(); + } else if ("stagingSettings".equals(fieldName)) { + deserializedCopyActivityTypeProperties.stagingSettings = StagingSettings.fromJson(reader); + } else if ("parallelCopies".equals(fieldName)) { + deserializedCopyActivityTypeProperties.parallelCopies = reader.readUntyped(); + } else if ("dataIntegrationUnits".equals(fieldName)) { + deserializedCopyActivityTypeProperties.dataIntegrationUnits = reader.readUntyped(); + } else if ("enableSkipIncompatibleRow".equals(fieldName)) { + deserializedCopyActivityTypeProperties.enableSkipIncompatibleRow = reader.readUntyped(); + } else if ("redirectIncompatibleRowSettings".equals(fieldName)) { + deserializedCopyActivityTypeProperties.redirectIncompatibleRowSettings + = RedirectIncompatibleRowSettings.fromJson(reader); + } else if ("logStorageSettings".equals(fieldName)) { + deserializedCopyActivityTypeProperties.logStorageSettings = LogStorageSettings.fromJson(reader); + } else if ("logSettings".equals(fieldName)) { + deserializedCopyActivityTypeProperties.logSettings = LogSettings.fromJson(reader); + 
} else if ("preserveRules".equals(fieldName)) { + List preserveRules = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCopyActivityTypeProperties.preserveRules = preserveRules; + } else if ("preserve".equals(fieldName)) { + List preserve = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCopyActivityTypeProperties.preserve = preserve; + } else if ("validateDataConsistency".equals(fieldName)) { + deserializedCopyActivityTypeProperties.validateDataConsistency = reader.readUntyped(); + } else if ("skipErrorFile".equals(fieldName)) { + deserializedCopyActivityTypeProperties.skipErrorFile = SkipErrorFile.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedCopyActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java index 93c12a7d1f1c..6ccaf5655d12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java @@ -5,52 +5,51 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CosmosDbConnectionMode; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import java.io.IOException; /** * CosmosDB linked service properties. */ @Fluent -public final class CosmosDbLinkedServiceTypeProperties { +public final class CosmosDbLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "accountEndpoint") private Object accountEndpoint; /* * The name of the database. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "database") private Object database; /* * The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "accountKey") private SecretBase accountKey; /* * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string. */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -59,14 +58,12 @@ public final class CosmosDbLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). 
*/ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -74,26 +71,22 @@ public final class CosmosDbLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The connection mode used to access CosmosDB account. Type: string. */ - @JsonProperty(value = "connectionMode") private CosmosDbConnectionMode connectionMode; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -383,4 +376,78 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("accountEndpoint", this.accountEndpoint); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeJsonField("accountKey", this.accountKey); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeStringField("connectionMode", + this.connectionMode == null ? 
null : this.connectionMode.toString()); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CosmosDbLinkedServiceTypeProperties. + */ + public static CosmosDbLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbLinkedServiceTypeProperties deserializedCosmosDbLinkedServiceTypeProperties + = new CosmosDbLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("accountEndpoint".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.accountEndpoint = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("accountKey".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.accountKey = SecretBase.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + 
deserializedCosmosDbLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("connectionMode".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.connectionMode + = CosmosDbConnectionMode.fromString(reader.getString()); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedCosmosDbLinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedCosmosDbLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java index d20230c16317..a09476d70b54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiCollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * CosmosDB (MongoDB API) database dataset properties. */ @Fluent -public final class CosmosDbMongoDbApiCollectionDatasetTypeProperties { +public final class CosmosDbMongoDbApiCollectionDatasetTypeProperties + implements JsonSerializable { /* * The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collection", required = true) private Object collection; /** @@ -62,4 +66,42 @@ public void validate() { private static final ClientLogger LOGGER = new ClientLogger(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collection", this.collection); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiCollectionDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiCollectionDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiCollectionDatasetTypeProperties. 
+ */ + public static CosmosDbMongoDbApiCollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiCollectionDatasetTypeProperties deserializedCosmosDbMongoDbApiCollectionDatasetTypeProperties + = new CosmosDbMongoDbApiCollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collection".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDatasetTypeProperties.collection = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCosmosDbMongoDbApiCollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java index a49cecedc532..f4c3a5d8f060 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbMongoDbApiLinkedServiceTypeProperties.java @@ -6,32 +6,34 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * CosmosDB (MongoDB API) linked service properties. 
*/ @Fluent -public final class CosmosDbMongoDbApiLinkedServiceTypeProperties { +public final class CosmosDbMongoDbApiLinkedServiceTypeProperties + implements JsonSerializable { /* * Whether the CosmosDB (MongoDB API) server version is higher than 3.2. The default value is false. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "isServerVersionAbove32") private Object isServerVersionAbove32; /* * The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: * string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "database", required = true) private Object database; /** @@ -125,4 +127,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbMongoDbApiLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("isServerVersionAbove32", this.isServerVersionAbove32); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiLinkedServiceTypeProperties. + */ + public static CosmosDbMongoDbApiLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiLinkedServiceTypeProperties deserializedCosmosDbMongoDbApiLinkedServiceTypeProperties + = new CosmosDbMongoDbApiLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("isServerVersionAbove32".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedServiceTypeProperties.isServerVersionAbove32 + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCosmosDbMongoDbApiLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java index afa3515db0fc..7713c0b3cc1b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbSqlApiCollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * CosmosDB (SQL API) Collection dataset properties. */ @Fluent -public final class CosmosDbSqlApiCollectionDatasetTypeProperties { +public final class CosmosDbSqlApiCollectionDatasetTypeProperties + implements JsonSerializable { /* * CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collectionName", required = true) private Object collectionName; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbSqlApiCollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collectionName", this.collectionName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbSqlApiCollectionDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbSqlApiCollectionDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbSqlApiCollectionDatasetTypeProperties. 
+ */ + public static CosmosDbSqlApiCollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbSqlApiCollectionDatasetTypeProperties deserializedCosmosDbSqlApiCollectionDatasetTypeProperties + = new CosmosDbSqlApiCollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collectionName".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDatasetTypeProperties.collectionName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCosmosDbSqlApiCollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java index 5764efd229ae..c2758490be73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CouchbaseLinkedServiceTypeProperties.java @@ -5,31 +5,33 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Couchbase server linked service properties. 
*/ @Fluent -public final class CouchbaseLinkedServiceTypeProperties { +public final class CouchbaseLinkedServiceTypeProperties + implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of credString in connection string. */ - @JsonProperty(value = "credString") private AzureKeyVaultSecretReference credString; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +114,48 @@ public void validate() { credString().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("credString", this.credString); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CouchbaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CouchbaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CouchbaseLinkedServiceTypeProperties. 
+ */ + public static CouchbaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CouchbaseLinkedServiceTypeProperties deserializedCouchbaseLinkedServiceTypeProperties + = new CouchbaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedCouchbaseLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("credString".equals(fieldName)) { + deserializedCouchbaseLinkedServiceTypeProperties.credString + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedCouchbaseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCouchbaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateDataFlowDebugSessionResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateDataFlowDebugSessionResponseInner.java index 22f8e13f8ee5..2a57703cb388 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateDataFlowDebugSessionResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateDataFlowDebugSessionResponseInner.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * Response body structure for creating data flow debug session. */ @Fluent -public final class CreateDataFlowDebugSessionResponseInner { +public final class CreateDataFlowDebugSessionResponseInner + implements JsonSerializable { /* * The state of the debug session. */ - @JsonProperty(value = "status") private String status; /* * The ID of data flow debug session. */ - @JsonProperty(value = "sessionId") private String sessionId; /** @@ -77,4 +80,44 @@ public CreateDataFlowDebugSessionResponseInner withSessionId(String sessionId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("status", this.status); + jsonWriter.writeStringField("sessionId", this.sessionId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CreateDataFlowDebugSessionResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CreateDataFlowDebugSessionResponseInner if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CreateDataFlowDebugSessionResponseInner. 
+ */ + public static CreateDataFlowDebugSessionResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CreateDataFlowDebugSessionResponseInner deserializedCreateDataFlowDebugSessionResponseInner + = new CreateDataFlowDebugSessionResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("status".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionResponseInner.status = reader.getString(); + } else if ("sessionId".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionResponseInner.sessionId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCreateDataFlowDebugSessionResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java index 25dd6df109ac..a638a5c991a3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CreateRunResponseInner.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Response body with a run identifier. */ @Fluent -public final class CreateRunResponseInner { +public final class CreateRunResponseInner implements JsonSerializable { /* * Identifier of a run. 
*/ - @JsonProperty(value = "runId", required = true) private String runId; /** @@ -58,4 +61,41 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CreateRunResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("runId", this.runId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CreateRunResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CreateRunResponseInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CreateRunResponseInner. + */ + public static CreateRunResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CreateRunResponseInner deserializedCreateRunResponseInner = new CreateRunResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("runId".equals(fieldName)) { + deserializedCreateRunResponseInner.runId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCreateRunResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java index 2f62c13a3de1..81d9b9737b2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CredentialResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Credential; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Credential resource type. @@ -18,25 +21,21 @@ public final class CredentialResourceInner extends SubResource { /* * Properties of credentials. */ - @JsonProperty(value = "properties", required = true) private Credential properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CredentialResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CredentialResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CredentialResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the CredentialResourceInner. + */ + public static CredentialResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CredentialResourceInner deserializedCredentialResourceInner = new CredentialResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedCredentialResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedCredentialResourceInner.properties = Credential.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedCredentialResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedCredentialResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedCredentialResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCredentialResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java index 861d8cece10d..ec167fcce6a2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomActivityTypeProperties.java @@ -6,61 +6,56 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CustomActivityReferenceObject; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Custom activity properties. */ @Fluent -public final class CustomActivityTypeProperties { +public final class CustomActivityTypeProperties implements JsonSerializable { /* * Command for custom activity Type: string (or Expression with resultType string). */ - @JsonProperty(value = "command", required = true) private Object command; /* * Resource linked service reference. */ - @JsonProperty(value = "resourceLinkedService") private LinkedServiceReference resourceLinkedService; /* * Folder path for resource files Type: string (or Expression with resultType string). */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * Reference objects */ - @JsonProperty(value = "referenceObjects") private CustomActivityReferenceObject referenceObjects; /* * User defined property bag. There is no restriction on the keys or values that can be used. The user specified * custom activity has the full responsibility to consume and interpret the content defined. */ - @JsonProperty(value = "extendedProperties") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map extendedProperties; /* * The retention time for the files submitted for custom activity. Type: double (or Expression with resultType * double). */ - @JsonProperty(value = "retentionTimeInDays") private Object retentionTimeInDays; /* * Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType * double). 
*/ - @JsonProperty(value = "autoUserSpecification") private Object autoUserSpecification; /** @@ -237,4 +232,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CustomActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("command", this.command); + jsonWriter.writeJsonField("resourceLinkedService", this.resourceLinkedService); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeJsonField("referenceObjects", this.referenceObjects); + jsonWriter.writeMapField("extendedProperties", this.extendedProperties, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("retentionTimeInDays", this.retentionTimeInDays); + jsonWriter.writeUntypedField("autoUserSpecification", this.autoUserSpecification); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomActivityTypeProperties. 
+ */ + public static CustomActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomActivityTypeProperties deserializedCustomActivityTypeProperties = new CustomActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("command".equals(fieldName)) { + deserializedCustomActivityTypeProperties.command = reader.readUntyped(); + } else if ("resourceLinkedService".equals(fieldName)) { + deserializedCustomActivityTypeProperties.resourceLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("folderPath".equals(fieldName)) { + deserializedCustomActivityTypeProperties.folderPath = reader.readUntyped(); + } else if ("referenceObjects".equals(fieldName)) { + deserializedCustomActivityTypeProperties.referenceObjects + = CustomActivityReferenceObject.fromJson(reader); + } else if ("extendedProperties".equals(fieldName)) { + Map extendedProperties = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedCustomActivityTypeProperties.extendedProperties = extendedProperties; + } else if ("retentionTimeInDays".equals(fieldName)) { + deserializedCustomActivityTypeProperties.retentionTimeInDays = reader.readUntyped(); + } else if ("autoUserSpecification".equals(fieldName)) { + deserializedCustomActivityTypeProperties.autoUserSpecification = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCustomActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java index 34a40405f9ad..8ffb0dce1ccb 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CustomEventsTriggerTypeProperties.java @@ -6,38 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Custom Events Trigger properties. */ @Fluent -public final class CustomEventsTriggerTypeProperties { +public final class CustomEventsTriggerTypeProperties implements JsonSerializable { /* * The event subject must begin with the pattern provided for trigger to fire. At least one of these must be * provided: subjectBeginsWith, subjectEndsWith. */ - @JsonProperty(value = "subjectBeginsWith") private String subjectBeginsWith; /* * The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: * subjectBeginsWith, subjectEndsWith. */ - @JsonProperty(value = "subjectEndsWith") private String subjectEndsWith; /* * The list of event types that cause this trigger to fire. */ - @JsonProperty(value = "events", required = true) private List events; /* * The ARM resource ID of the Azure Event Grid Topic. 
*/ - @JsonProperty(value = "scope", required = true) private String scope; /** @@ -149,4 +149,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CustomEventsTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("events", this.events, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("scope", this.scope); + jsonWriter.writeStringField("subjectBeginsWith", this.subjectBeginsWith); + jsonWriter.writeStringField("subjectEndsWith", this.subjectEndsWith); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomEventsTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomEventsTriggerTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomEventsTriggerTypeProperties. 
+ */ + public static CustomEventsTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomEventsTriggerTypeProperties deserializedCustomEventsTriggerTypeProperties + = new CustomEventsTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("events".equals(fieldName)) { + List events = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCustomEventsTriggerTypeProperties.events = events; + } else if ("scope".equals(fieldName)) { + deserializedCustomEventsTriggerTypeProperties.scope = reader.getString(); + } else if ("subjectBeginsWith".equals(fieldName)) { + deserializedCustomEventsTriggerTypeProperties.subjectBeginsWith = reader.getString(); + } else if ("subjectEndsWith".equals(fieldName)) { + deserializedCustomEventsTriggerTypeProperties.subjectEndsWith = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCustomEventsTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugCommandResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugCommandResponseInner.java index 632efc0ef2d8..2c847ce887d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugCommandResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugCommandResponseInner.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Response body structure of data flow result for data preview, statistics or expression preview. */ @Fluent -public final class DataFlowDebugCommandResponseInner { +public final class DataFlowDebugCommandResponseInner implements JsonSerializable { /* * The run status of data preview, statistics or expression preview. */ - @JsonProperty(value = "status") private String status; /* * The result data of data preview, statistics or expression preview. */ - @JsonProperty(value = "data") private String data; /** @@ -77,4 +79,44 @@ public DataFlowDebugCommandResponseInner withData(String data) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("status", this.status); + jsonWriter.writeStringField("data", this.data); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugCommandResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugCommandResponseInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowDebugCommandResponseInner. 
+ */ + public static DataFlowDebugCommandResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugCommandResponseInner deserializedDataFlowDebugCommandResponseInner + = new DataFlowDebugCommandResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("status".equals(fieldName)) { + deserializedDataFlowDebugCommandResponseInner.status = reader.getString(); + } else if ("data".equals(fieldName)) { + deserializedDataFlowDebugCommandResponseInner.data = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowDebugCommandResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugSessionInfoInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugSessionInfoInner.java index 5d99aa0d33c8..7848043f3612 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugSessionInfoInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowDebugSessionInfoInner.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,65 +17,55 
@@ * Data flow debug session info. */ @Fluent -public final class DataFlowDebugSessionInfoInner { +public final class DataFlowDebugSessionInfoInner implements JsonSerializable { /* * The name of the data flow. */ - @JsonProperty(value = "dataFlowName") private String dataFlowName; /* * Compute type of the cluster. */ - @JsonProperty(value = "computeType") private String computeType; /* * Core count of the cluster. */ - @JsonProperty(value = "coreCount") private Integer coreCount; /* * Node count of the cluster. (deprecated property) */ - @JsonProperty(value = "nodeCount") private Integer nodeCount; /* * Attached integration runtime name of data flow debug session. */ - @JsonProperty(value = "integrationRuntimeName") private String integrationRuntimeName; /* * The ID of data flow debug session. */ - @JsonProperty(value = "sessionId") private String sessionId; /* * Start time of data flow debug session. */ - @JsonProperty(value = "startTime") private String startTime; /* * Compute type of the cluster. */ - @JsonProperty(value = "timeToLiveInMinutes") private Integer timeToLiveInMinutes; /* * Last activity time of data flow debug session. */ - @JsonProperty(value = "lastActivityTime") private String lastActivityTime; /* * Data flow debug session info. */ - @JsonIgnore private Map additionalProperties; /** @@ -268,7 +259,6 @@ public DataFlowDebugSessionInfoInner withLastActivityTime(String lastActivityTim * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -284,14 +274,6 @@ public DataFlowDebugSessionInfoInner withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -299,4 +281,77 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("dataFlowName", this.dataFlowName); + jsonWriter.writeStringField("computeType", this.computeType); + jsonWriter.writeNumberField("coreCount", this.coreCount); + jsonWriter.writeNumberField("nodeCount", this.nodeCount); + jsonWriter.writeStringField("integrationRuntimeName", this.integrationRuntimeName); + jsonWriter.writeStringField("sessionId", this.sessionId); + jsonWriter.writeStringField("startTime", this.startTime); + jsonWriter.writeNumberField("timeToLiveInMinutes", this.timeToLiveInMinutes); + jsonWriter.writeStringField("lastActivityTime", this.lastActivityTime); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugSessionInfoInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugSessionInfoInner if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowDebugSessionInfoInner. 
+ */ + public static DataFlowDebugSessionInfoInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugSessionInfoInner deserializedDataFlowDebugSessionInfoInner + = new DataFlowDebugSessionInfoInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataFlowName".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.dataFlowName = reader.getString(); + } else if ("computeType".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.computeType = reader.getString(); + } else if ("coreCount".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.coreCount = reader.getNullable(JsonReader::getInt); + } else if ("nodeCount".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.nodeCount = reader.getNullable(JsonReader::getInt); + } else if ("integrationRuntimeName".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.integrationRuntimeName = reader.getString(); + } else if ("sessionId".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.sessionId = reader.getString(); + } else if ("startTime".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.startTime = reader.getString(); + } else if ("timeToLiveInMinutes".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.timeToLiveInMinutes + = reader.getNullable(JsonReader::getInt); + } else if ("lastActivityTime".equals(fieldName)) { + deserializedDataFlowDebugSessionInfoInner.lastActivityTime = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataFlowDebugSessionInfoInner.additionalProperties = additionalProperties; + + return deserializedDataFlowDebugSessionInfoInner; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java index ceeadc91ac6a..3f1bb049118c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataFlowResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Data flow resource type. @@ -18,25 +21,21 @@ public final class DataFlowResourceInner extends SubResource { /* * Data flow properties. */ - @JsonProperty(value = "properties", required = true) private DataFlow properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. 
*/ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataFlowResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowResourceInner. + */ + public static DataFlowResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowResourceInner deserializedDataFlowResourceInner = new DataFlowResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedDataFlowResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedDataFlowResourceInner.properties = DataFlow.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedDataFlowResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedDataFlowResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedDataFlowResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowResourceInner; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java index 99c05ff2cadb..2932277e5cd5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataLakeAnalyticsUsqlActivityTypeProperties.java @@ -6,61 +6,57 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * DataLakeAnalyticsU-SQL activity properties. */ @Fluent -public final class DataLakeAnalyticsUsqlActivityTypeProperties { +public final class DataLakeAnalyticsUsqlActivityTypeProperties + implements JsonSerializable { /* * Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "scriptPath", required = true) private Object scriptPath; /* * Script linked service reference. */ - @JsonProperty(value = "scriptLinkedService", required = true) private LinkedServiceReference scriptLinkedService; /* * The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression * with resultType integer), minimum: 1. 
*/ - @JsonProperty(value = "degreeOfParallelism") private Object degreeOfParallelism; /* * Determines which jobs out of all that are queued should be selected to run first. The lower the number, the * higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. */ - @JsonProperty(value = "priority") private Object priority; /* * Parameters for U-SQL job request. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "runtimeVersion") private Object runtimeVersion; /* * Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "compilationMode") private Object compilationMode; /** @@ -243,4 +239,62 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataLakeAnalyticsUsqlActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("scriptPath", this.scriptPath); + jsonWriter.writeJsonField("scriptLinkedService", this.scriptLinkedService); + jsonWriter.writeUntypedField("degreeOfParallelism", this.degreeOfParallelism); + jsonWriter.writeUntypedField("priority", this.priority); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("runtimeVersion", this.runtimeVersion); + jsonWriter.writeUntypedField("compilationMode", this.compilationMode); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataLakeAnalyticsUsqlActivityTypeProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of DataLakeAnalyticsUsqlActivityTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataLakeAnalyticsUsqlActivityTypeProperties. + */ + public static DataLakeAnalyticsUsqlActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataLakeAnalyticsUsqlActivityTypeProperties deserializedDataLakeAnalyticsUsqlActivityTypeProperties + = new DataLakeAnalyticsUsqlActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("scriptPath".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.scriptPath = reader.readUntyped(); + } else if ("scriptLinkedService".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.scriptLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("degreeOfParallelism".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.degreeOfParallelism = reader.readUntyped(); + } else if ("priority".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.priority = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.parameters = parameters; + } else if ("runtimeVersion".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.runtimeVersion = reader.readUntyped(); + } else if ("compilationMode".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivityTypeProperties.compilationMode = reader.readUntyped(); + } else { + 
reader.skipChildren(); + } + } + + return deserializedDataLakeAnalyticsUsqlActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java index 39ea19e53f8e..6e3b8652265f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksNotebookActivityTypeProperties.java @@ -6,8 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -15,26 +18,23 @@ * Databricks Notebook activity properties. */ @Fluent -public final class DatabricksNotebookActivityTypeProperties { +public final class DatabricksNotebookActivityTypeProperties + implements JsonSerializable { /* * The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "notebookPath", required = true) private Object notebookPath; /* * Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the * default value from the notebook will be used. 
 */ - @JsonProperty(value = "baseParameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map<String, Object> baseParameters; /* * A list of libraries to be installed on the cluster that will execute the job. */ - @JsonProperty(value = "libraries") private List<Map<String, Object>> libraries; /** @@ -121,4 +121,53 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksNotebookActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("notebookPath", this.notebookPath); + jsonWriter.writeMapField("baseParameters", this.baseParameters, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("libraries", this.libraries, + (writer, element) -> writer.writeMap(element, (writer1, element1) -> writer1.writeUntyped(element1))); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksNotebookActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatabricksNotebookActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksNotebookActivityTypeProperties. 
+ */ + public static DatabricksNotebookActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatabricksNotebookActivityTypeProperties deserializedDatabricksNotebookActivityTypeProperties + = new DatabricksNotebookActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("notebookPath".equals(fieldName)) { + deserializedDatabricksNotebookActivityTypeProperties.notebookPath = reader.readUntyped(); + } else if ("baseParameters".equals(fieldName)) { + Map<String, Object> baseParameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedDatabricksNotebookActivityTypeProperties.baseParameters = baseParameters; + } else if ("libraries".equals(fieldName)) { + List<Map<String, Object>> libraries + = reader.readArray(reader1 -> reader1.readMap(reader2 -> reader2.readUntyped())); + deserializedDatabricksNotebookActivityTypeProperties.libraries = libraries; + } else { + reader.skipChildren(); + } + } + + return deserializedDatabricksNotebookActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkJarActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkJarActivityTypeProperties.java index dc1d7f8094d6..6ec15098a816 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkJarActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkJarActivityTypeProperties.java @@ -6,7 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; 
+import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -14,24 +18,22 @@ * Databricks SparkJar activity properties. */ @Fluent -public final class DatabricksSparkJarActivityTypeProperties { +public final class DatabricksSparkJarActivityTypeProperties + implements JsonSerializable<DatabricksSparkJarActivityTypeProperties> { /* * The full name of the class containing the main method to be executed. This class must be contained in a JAR * provided as a library. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "mainClassName", required = true) private Object mainClassName; /* * Parameters that will be passed to the main method. */ - @JsonProperty(value = "parameters") private List<Object> parameters; /* * A list of libraries to be installed on the cluster that will execute the job. */ - @JsonProperty(value = "libraries") private List<Map<String, Object>> libraries; /** @@ -116,4 +118,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksSparkJarActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("mainClassName", this.mainClassName); + jsonWriter.writeArrayField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("libraries", this.libraries, + (writer, element) -> writer.writeMap(element, (writer1, element1) -> writer1.writeUntyped(element1))); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksSparkJarActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DatabricksSparkJarActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksSparkJarActivityTypeProperties. + */ + public static DatabricksSparkJarActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatabricksSparkJarActivityTypeProperties deserializedDatabricksSparkJarActivityTypeProperties + = new DatabricksSparkJarActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("mainClassName".equals(fieldName)) { + deserializedDatabricksSparkJarActivityTypeProperties.mainClassName = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + List<Object> parameters = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDatabricksSparkJarActivityTypeProperties.parameters = parameters; + } else if ("libraries".equals(fieldName)) { + List<Map<String, Object>> libraries + = reader.readArray(reader1 -> reader1.readMap(reader2 -> reader2.readUntyped())); + deserializedDatabricksSparkJarActivityTypeProperties.libraries = libraries; + } else { + reader.skipChildren(); + } + } + + return deserializedDatabricksSparkJarActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkPythonActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkPythonActivityTypeProperties.java index b7d5e9e6e8c0..05fdd74e07b2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkPythonActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatabricksSparkPythonActivityTypeProperties.java @@ -6,7 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -14,24 +18,22 @@ * Databricks SparkPython activity properties. */ @Fluent -public final class DatabricksSparkPythonActivityTypeProperties { +public final class DatabricksSparkPythonActivityTypeProperties + implements JsonSerializable { /* * The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "pythonFile", required = true) private Object pythonFile; /* * Command line parameters that will be passed to the Python file. */ - @JsonProperty(value = "parameters") private List parameters; /* * A list of libraries to be installed on the cluster that will execute the job. 
 */ - @JsonProperty(value = "libraries") private List<Map<String, Object>> libraries; /** @@ -116,4 +118,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksSparkPythonActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("pythonFile", this.pythonFile); + jsonWriter.writeArrayField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("libraries", this.libraries, + (writer, element) -> writer.writeMap(element, (writer1, element1) -> writer1.writeUntyped(element1))); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksSparkPythonActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatabricksSparkPythonActivityTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksSparkPythonActivityTypeProperties. 
+ */ + public static DatabricksSparkPythonActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatabricksSparkPythonActivityTypeProperties deserializedDatabricksSparkPythonActivityTypeProperties + = new DatabricksSparkPythonActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("pythonFile".equals(fieldName)) { + deserializedDatabricksSparkPythonActivityTypeProperties.pythonFile = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + List<Object> parameters = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDatabricksSparkPythonActivityTypeProperties.parameters = parameters; + } else if ("libraries".equals(fieldName)) { + List<Map<String, Object>> libraries + = reader.readArray(reader1 -> reader1.readMap(reader2 -> reader2.readUntyped())); + deserializedDatabricksSparkPythonActivityTypeProperties.libraries = libraries; + } else { + reader.skipChildren(); + } + } + + return deserializedDatabricksSparkPythonActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatasetResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatasetResourceInner.java index 02cbd47c8581..d2e47a6be31a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatasetResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DatasetResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Dataset; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Dataset resource type. @@ -18,25 +21,21 @@ public final class DatasetResourceInner extends SubResource { /* * Dataset properties. */ - @JsonProperty(value = "properties", required = true) private Dataset properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatasetResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatasetResourceInner. 
+ */ + public static DatasetResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetResourceInner deserializedDatasetResourceInner = new DatasetResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedDatasetResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedDatasetResourceInner.properties = Dataset.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedDatasetResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedDatasetResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedDatasetResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDatasetResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataworldLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataworldLinkedServiceTypeProperties.java index 2cf94fd9d6af..25694b65942c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataworldLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DataworldLinkedServiceTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Dataworld linked service type properties. */ @Fluent -public final class DataworldLinkedServiceTypeProperties { +public final class DataworldLinkedServiceTypeProperties + implements JsonSerializable { /* * The api token for the Dataworld source. */ - @JsonProperty(value = "apiToken", required = true) private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -91,4 +94,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataworldLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataworldLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataworldLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataworldLinkedServiceTypeProperties. 
+ */ + public static DataworldLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataworldLinkedServiceTypeProperties deserializedDataworldLinkedServiceTypeProperties + = new DataworldLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("apiToken".equals(fieldName)) { + deserializedDataworldLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDataworldLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataworldLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2LinkedServiceTypeProperties.java index 9b1044a4a130..ef935cf5ac9a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2LinkedServiceTypeProperties.java @@ -5,74 +5,69 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Db2AuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * DB2 linked service 
properties. */ @Fluent -public final class Db2LinkedServiceTypeProperties { +public final class Db2LinkedServiceTypeProperties implements JsonSerializable { /* * The connection string. It is mutually exclusive with server, database, authenticationType, userName, * packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * Database name for connection. It is mutually exclusive with connectionString property. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "database") private Object database; /* * AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */ - @JsonProperty(value = "authenticationType") private Db2AuthenticationType authenticationType; /* * Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * Under where packages are created when querying database. It is mutually exclusive with connectionString property. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "packageCollection") private Object packageCollection; /* * Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "certificateCommonName") private Object certificateCommonName; /* * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime * credential manager. It is mutually exclusive with connectionString property. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -291,4 +286,67 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("packageCollection", this.packageCollection); + jsonWriter.writeUntypedField("certificateCommonName", this.certificateCommonName); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Db2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Db2LinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the Db2LinkedServiceTypeProperties. 
+ */ + public static Db2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Db2LinkedServiceTypeProperties deserializedDb2LinkedServiceTypeProperties + = new Db2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("server".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.authenticationType + = Db2AuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("packageCollection".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.packageCollection = reader.readUntyped(); + } else if ("certificateCommonName".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.certificateCommonName = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDb2LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDb2LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2TableDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2TableDatasetTypeProperties.java index 475709ccd1a0..8f6e7fad3c9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2TableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Db2TableDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Db2 table dataset properties. */ @Fluent -public final class Db2TableDatasetTypeProperties { +public final class Db2TableDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The Db2 schema name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The Db2 table name. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "table") private Object table; /** @@ -105,4 +106,47 @@ public Db2TableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Db2TableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Db2TableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the Db2TableDatasetTypeProperties. + */ + public static Db2TableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Db2TableDatasetTypeProperties deserializedDb2TableDatasetTypeProperties + = new Db2TableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedDb2TableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedDb2TableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedDb2TableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDb2TableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DeleteActivityTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DeleteActivityTypeProperties.java index 4a7f0ca93a92..3c053ec4bd4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DeleteActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DeleteActivityTypeProperties.java @@ -6,52 +6,50 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetReference; import com.azure.resourcemanager.datafactory.models.LogStorageSettings; import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Delete activity properties. */ @Fluent -public final class DeleteActivityTypeProperties { +public final class DeleteActivityTypeProperties implements JsonSerializable { /* * If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * The max concurrent connections to connect data source at the same time. */ - @JsonProperty(value = "maxConcurrentConnections") private Integer maxConcurrentConnections; /* * Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "enableLogging") private Object enableLogging; /* * Log storage settings customer need to provide when enableLogging is true. 
*/ - @JsonProperty(value = "logStorageSettings") private LogStorageSettings logStorageSettings; /* * Delete activity dataset reference. */ - @JsonProperty(value = "dataset", required = true) private DatasetReference dataset; /* * Delete activity store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /** @@ -208,4 +206,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DeleteActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("dataset", this.dataset); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeNumberField("maxConcurrentConnections", this.maxConcurrentConnections); + jsonWriter.writeUntypedField("enableLogging", this.enableLogging); + jsonWriter.writeJsonField("logStorageSettings", this.logStorageSettings); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DeleteActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DeleteActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DeleteActivityTypeProperties. 
+ */ + public static DeleteActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DeleteActivityTypeProperties deserializedDeleteActivityTypeProperties = new DeleteActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataset".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.dataset = DatasetReference.fromJson(reader); + } else if ("recursive".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.recursive = reader.readUntyped(); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.maxConcurrentConnections + = reader.getNullable(JsonReader::getInt); + } else if ("enableLogging".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.enableLogging = reader.readUntyped(); + } else if ("logStorageSettings".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.logStorageSettings = LogStorageSettings.fromJson(reader); + } else if ("storeSettings".equals(fieldName)) { + deserializedDeleteActivityTypeProperties.storeSettings = StoreReadSettings.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDeleteActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DelimitedTextDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DelimitedTextDatasetTypeProperties.java index a1dc6662918c..d88130b83766 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DelimitedTextDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DelimitedTextDatasetTypeProperties.java @@ -6,30 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * DelimitedText dataset properties. */ @Fluent -public final class DelimitedTextDatasetTypeProperties { +public final class DelimitedTextDatasetTypeProperties implements JsonSerializable { /* * The location of the delimited text storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The column delimiter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "columnDelimiter") private Object columnDelimiter; /* * The row delimiter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "rowDelimiter") private Object rowDelimiter; /* @@ -38,44 +39,37 @@ public final class DelimitedTextDatasetTypeProperties { * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "encodingName") private Object encodingName; /* * The data compressionCodec. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "compressionCodec") private Object compressionCodec; /* * The data compression method used for DelimitedText. */ - @JsonProperty(value = "compressionLevel") private Object compressionLevel; /* * The quote character. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "quoteChar") private Object quoteChar; /* * The escape character. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "escapeChar") private Object escapeChar; /* * When used as input, treat the first row of data as headers. When used as output,write the headers into the output * as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "firstRowAsHeader") private Object firstRowAsHeader; /* * The null value string. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "nullValue") private Object nullValue; /** @@ -312,4 +306,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DelimitedTextDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("columnDelimiter", this.columnDelimiter); + jsonWriter.writeUntypedField("rowDelimiter", this.rowDelimiter); + jsonWriter.writeUntypedField("encodingName", this.encodingName); + jsonWriter.writeUntypedField("compressionCodec", this.compressionCodec); + jsonWriter.writeUntypedField("compressionLevel", this.compressionLevel); + jsonWriter.writeUntypedField("quoteChar", this.quoteChar); + jsonWriter.writeUntypedField("escapeChar", this.escapeChar); + jsonWriter.writeUntypedField("firstRowAsHeader", this.firstRowAsHeader); + jsonWriter.writeUntypedField("nullValue", this.nullValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DelimitedTextDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DelimitedTextDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the DelimitedTextDatasetTypeProperties. + */ + public static DelimitedTextDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextDatasetTypeProperties deserializedDelimitedTextDatasetTypeProperties + = new DelimitedTextDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("columnDelimiter".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.columnDelimiter = reader.readUntyped(); + } else if ("rowDelimiter".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.rowDelimiter = reader.readUntyped(); + } else if ("encodingName".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.encodingName = reader.readUntyped(); + } else if ("compressionCodec".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.compressionCodec = reader.readUntyped(); + } else if ("compressionLevel".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.compressionLevel = reader.readUntyped(); + } else if ("quoteChar".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.quoteChar = reader.readUntyped(); + } else if ("escapeChar".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.escapeChar = reader.readUntyped(); + } else if ("firstRowAsHeader".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.firstRowAsHeader = reader.readUntyped(); + } else if ("nullValue".equals(fieldName)) { + deserializedDelimitedTextDatasetTypeProperties.nullValue = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDelimitedTextDatasetTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DocumentDbCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DocumentDbCollectionDatasetTypeProperties.java index d8a3785dbf5a..74827c5c504a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DocumentDbCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DocumentDbCollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * DocumentDB Collection dataset properties. */ @Fluent -public final class DocumentDbCollectionDatasetTypeProperties { +public final class DocumentDbCollectionDatasetTypeProperties + implements JsonSerializable { /* * Document Database collection name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collectionName", required = true) private Object collectionName; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DocumentDbCollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collectionName", this.collectionName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DocumentDbCollectionDatasetTypeProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of DocumentDbCollectionDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DocumentDbCollectionDatasetTypeProperties. + */ + public static DocumentDbCollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DocumentDbCollectionDatasetTypeProperties deserializedDocumentDbCollectionDatasetTypeProperties + = new DocumentDbCollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collectionName".equals(fieldName)) { + deserializedDocumentDbCollectionDatasetTypeProperties.collectionName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDocumentDbCollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillDatasetTypeProperties.java index 6a4fd8be8aaf..273e73daeb7d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Drill Dataset Properties. */ @Fluent -public final class DrillDatasetTypeProperties { +public final class DrillDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Drill. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Drill. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public DrillDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DrillDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DrillDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DrillDatasetTypeProperties. 
+ */ + public static DrillDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DrillDatasetTypeProperties deserializedDrillDatasetTypeProperties = new DrillDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedDrillDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedDrillDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedDrillDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDrillDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillLinkedServiceTypeProperties.java index 3d8c09bade0f..9432cd14854d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DrillLinkedServiceTypeProperties.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Drill server linked service properties. 
*/ @Fluent -public final class DrillLinkedServiceTypeProperties { +public final class DrillLinkedServiceTypeProperties implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "pwd") private AzureKeyVaultSecretReference pwd; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +113,47 @@ public void validate() { pwd().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("pwd", this.pwd); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DrillLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DrillLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DrillLinkedServiceTypeProperties. 
+ */ + public static DrillLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DrillLinkedServiceTypeProperties deserializedDrillLinkedServiceTypeProperties + = new DrillLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedDrillLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("pwd".equals(fieldName)) { + deserializedDrillLinkedServiceTypeProperties.pwd = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDrillLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDrillLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXLinkedServiceTypeProperties.java index 13f1ccf80e0b..d33e39b6f0ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Dynamics AX linked service properties. */ @Fluent -public final class DynamicsAXLinkedServiceTypeProperties { +public final class DynamicsAXLinkedServiceTypeProperties + implements JsonSerializable { /* * The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. */ - @JsonProperty(value = "url", required = true) private Object url; /* * Specify the application's client ID. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId", required = true) private Object servicePrincipalId; /* * Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or * reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalKey", required = true) private SecretBase servicePrincipalKey; /* @@ -38,20 +40,17 @@ public final class DynamicsAXLinkedServiceTypeProperties { * hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant", required = true) private Object tenant; /* * Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "aadResourceId", required = true) private Object aadResourceId; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -230,4 +229,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsAXLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("aadResourceId", this.aadResourceId); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsAXLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsAXLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsAXLinkedServiceTypeProperties. 
+ */ + public static DynamicsAXLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsAXLinkedServiceTypeProperties deserializedDynamicsAXLinkedServiceTypeProperties + = new DynamicsAXLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("aadResourceId".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.aadResourceId = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDynamicsAXLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsAXLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXResourceDatasetTypeProperties.java index 9beb15d389a7..93954485e033 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXResourceDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsAXResourceDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Dynamics AX OData resource dataset properties. */ @Fluent -public final class DynamicsAXResourceDatasetTypeProperties { +public final class DynamicsAXResourceDatasetTypeProperties + implements JsonSerializable { /* * The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "path", required = true) private Object path; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsAXResourceDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsAXResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsAXResourceDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsAXResourceDatasetTypeProperties. 
+ */ + public static DynamicsAXResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsAXResourceDatasetTypeProperties deserializedDynamicsAXResourceDatasetTypeProperties + = new DynamicsAXResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("path".equals(fieldName)) { + deserializedDynamicsAXResourceDatasetTypeProperties.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsAXResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmEntityDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmEntityDatasetTypeProperties.java index d7f947da8151..7e0092ccedf6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmEntityDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmEntityDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Dynamics CRM entity dataset properties. */ @Fluent -public final class DynamicsCrmEntityDatasetTypeProperties { +public final class DynamicsCrmEntityDatasetTypeProperties + implements JsonSerializable { /* * The logical name of the entity. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "entityName") private Object entityName; /** @@ -51,4 +55,41 @@ public DynamicsCrmEntityDatasetTypeProperties withEntityName(Object entityName) */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("entityName", this.entityName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmEntityDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmEntityDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DynamicsCrmEntityDatasetTypeProperties. + */ + public static DynamicsCrmEntityDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmEntityDatasetTypeProperties deserializedDynamicsCrmEntityDatasetTypeProperties + = new DynamicsCrmEntityDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("entityName".equals(fieldName)) { + deserializedDynamicsCrmEntityDatasetTypeProperties.entityName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsCrmEntityDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmLinkedServiceTypeProperties.java index 5174c31a1d7d..76eb3cd62faa 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsCrmLinkedServiceTypeProperties.java @@ -6,41 +6,42 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Dynamics CRM linked service properties. */ @Fluent -public final class DynamicsCrmLinkedServiceTypeProperties { +public final class DynamicsCrmLinkedServiceTypeProperties + implements JsonSerializable { /* * The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for * Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "deploymentType", required = true) private Object deploymentType; /* * The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for * online. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "hostName") private Object hostname; /* * The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. * Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. * Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "serviceUri") private Object serviceUri; /* @@ -48,41 +49,41 @@ public final class DynamicsCrmLinkedServiceTypeProperties { * when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "organizationName") private Object organizationName; /* * The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises - * with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - * Expression with resultType string). + * with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario, 'Active + * Directory' for Dynamics on-premises with IFD. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType", required = true) private Object authenticationType; + /* + * The Active Directory domain that will verify user credentials. Type: string (or Expression with resultType + * string). + */ + private Object domain; + /* * User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password to access the Dynamics CRM instance. */ - @JsonProperty(value = "password") private SecretBase password; /* * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -91,20 +92,17 @@ public final class DynamicsCrmLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -230,7 +228,8 @@ public DynamicsCrmLinkedServiceTypeProperties withOrganizationName(Object organi /** * Get the authenticationType property: The authentication type to connect to Dynamics CRM server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @return the authenticationType value. */ @@ -241,7 +240,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Dynamics CRM server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). 
* * @param authenticationType the authenticationType value to set. * @return the DynamicsCrmLinkedServiceTypeProperties object itself. @@ -251,6 +251,28 @@ public DynamicsCrmLinkedServiceTypeProperties withAuthenticationType(Object auth return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @return the domain value. + */ + public Object domain() { + return this.domain; + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @param domain the domain value to set. + * @return the DynamicsCrmLinkedServiceTypeProperties object itself. + */ + public DynamicsCrmLinkedServiceTypeProperties withDomain(Object domain) { + this.domain = domain; + return this; + } + /** * Get the username property: User name to access the Dynamics CRM instance. Type: string (or Expression with * resultType string). 
@@ -437,4 +459,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsCrmLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("deploymentType", this.deploymentType); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("hostName", this.hostname); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("serviceUri", this.serviceUri); + jsonWriter.writeUntypedField("organizationName", this.organizationName); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsCrmLinkedServiceTypeProperties. 
+ */ + public static DynamicsCrmLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmLinkedServiceTypeProperties deserializedDynamicsCrmLinkedServiceTypeProperties + = new DynamicsCrmLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("deploymentType".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.deploymentType = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("hostName".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.hostname = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("serviceUri".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.serviceUri = reader.readUntyped(); + } else if ("organizationName".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.organizationName = reader.readUntyped(); + } else if ("domain".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.domain = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if 
("servicePrincipalCredential".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("credential".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDynamicsCrmLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsCrmLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsEntityDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsEntityDatasetTypeProperties.java index c694c19de9f8..fd41fea64438 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsEntityDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsEntityDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Dynamics entity dataset properties. */ @Fluent -public final class DynamicsEntityDatasetTypeProperties { +public final class DynamicsEntityDatasetTypeProperties + implements JsonSerializable { /* * The logical name of the entity. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "entityName") private Object entityName; /** @@ -51,4 +55,41 @@ public DynamicsEntityDatasetTypeProperties withEntityName(Object entityName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("entityName", this.entityName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsEntityDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsEntityDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DynamicsEntityDatasetTypeProperties. + */ + public static DynamicsEntityDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsEntityDatasetTypeProperties deserializedDynamicsEntityDatasetTypeProperties + = new DynamicsEntityDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("entityName".equals(fieldName)) { + deserializedDynamicsEntityDatasetTypeProperties.entityName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsEntityDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsLinkedServiceTypeProperties.java index c376933ad81d..92855b679301 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/DynamicsLinkedServiceTypeProperties.java @@ -6,41 +6,42 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Dynamics linked service properties. */ @Fluent -public final class DynamicsLinkedServiceTypeProperties { +public final class DynamicsLinkedServiceTypeProperties + implements JsonSerializable { /* * The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics * on-premises with Ifd. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "deploymentType", required = true) private Object deploymentType; /* * The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for * online. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "hostName") private Object hostname; /* * The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default * is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: * string (or Expression with resultType string). 
*/ - @JsonProperty(value = "serviceUri") private Object serviceUri; /* @@ -48,41 +49,41 @@ public final class DynamicsLinkedServiceTypeProperties { * there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "organizationName") private Object organizationName; /* * The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises - * with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - * Expression with resultType string). + * with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario, 'Active + * Directory' for Dynamics on-premises with IFD. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType", required = true) private Object authenticationType; + /* + * The Active Directory domain that will verify user credentials. Type: string (or Expression with resultType + * string). + */ + private Object domain; + /* * User name to access the Dynamics instance. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password to access the Dynamics instance. */ - @JsonProperty(value = "password") private SecretBase password; /* * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -91,20 +92,17 @@ public final class DynamicsLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. */ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -228,7 +226,8 @@ public DynamicsLinkedServiceTypeProperties withOrganizationName(Object organizat /** * Get the authenticationType property: The authentication type to connect to Dynamics server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @return the authenticationType value. */ @@ -239,7 +238,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Dynamics server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @param authenticationType the authenticationType value to set. 
* @return the DynamicsLinkedServiceTypeProperties object itself. @@ -249,6 +249,28 @@ public DynamicsLinkedServiceTypeProperties withAuthenticationType(Object authent return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @return the domain value. + */ + public Object domain() { + return this.domain; + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @param domain the domain value to set. + * @return the DynamicsLinkedServiceTypeProperties object itself. + */ + public DynamicsLinkedServiceTypeProperties withDomain(Object domain) { + this.domain = domain; + return this; + } + /** * Get the username property: User name to access the Dynamics instance. Type: string (or Expression with resultType * string). @@ -434,4 +456,83 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("deploymentType", this.deploymentType); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("hostName", this.hostname); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("serviceUri", this.serviceUri); + jsonWriter.writeUntypedField("organizationName", this.organizationName); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", 
this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsLinkedServiceTypeProperties. + */ + public static DynamicsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsLinkedServiceTypeProperties deserializedDynamicsLinkedServiceTypeProperties + = new DynamicsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("deploymentType".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.deploymentType = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("hostName".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.hostname = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("serviceUri".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.serviceUri = reader.readUntyped(); + } else if ("organizationName".equals(fieldName)) { + 
deserializedDynamicsLinkedServiceTypeProperties.organizationName = reader.readUntyped(); + } else if ("domain".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.domain = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedDynamicsLinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDynamicsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EloquaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EloquaLinkedServiceTypeProperties.java index d6762aa2a64e..8ebc2f25b8a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EloquaLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EloquaLinkedServiceTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Eloqua server linked service properties. */ @Fluent -public final class EloquaLinkedServiceTypeProperties { +public final class EloquaLinkedServiceTypeProperties implements JsonSerializable { /* * The endpoint of the Eloqua server. (i.e. eloqua.example.com) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) */ - @JsonProperty(value = "username", required = true) private Object username; /* * The password corresponding to the user name. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. 
Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -236,4 +233,60 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(EloquaLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EloquaLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EloquaLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EloquaLinkedServiceTypeProperties. 
+ */ + public static EloquaLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EloquaLinkedServiceTypeProperties deserializedEloquaLinkedServiceTypeProperties + = new EloquaLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedEloquaLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedEloquaLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EnvironmentVariableSetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EnvironmentVariableSetupTypeProperties.java index 543246e243c2..6b5ef140ad2b 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EnvironmentVariableSetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/EnvironmentVariableSetupTypeProperties.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Environment variable custom setup type properties. */ @Fluent -public final class EnvironmentVariableSetupTypeProperties { +public final class EnvironmentVariableSetupTypeProperties + implements JsonSerializable { /* * The name of the environment variable. */ - @JsonProperty(value = "variableName", required = true) private String variableName; /* * The value of the environment variable. */ - @JsonProperty(value = "variableValue", required = true) private String variableValue; /** @@ -90,4 +93,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(EnvironmentVariableSetupTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("variableName", this.variableName); + jsonWriter.writeStringField("variableValue", this.variableValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EnvironmentVariableSetupTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EnvironmentVariableSetupTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EnvironmentVariableSetupTypeProperties. + */ + public static EnvironmentVariableSetupTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EnvironmentVariableSetupTypeProperties deserializedEnvironmentVariableSetupTypeProperties + = new EnvironmentVariableSetupTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("variableName".equals(fieldName)) { + deserializedEnvironmentVariableSetupTypeProperties.variableName = reader.getString(); + } else if ("variableValue".equals(fieldName)) { + deserializedEnvironmentVariableSetupTypeProperties.variableValue = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedEnvironmentVariableSetupTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExcelDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExcelDatasetTypeProperties.java index 49c35aef9083..2a626f0f136b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExcelDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExcelDatasetTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; 
import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Excel dataset properties. */ @Fluent -public final class ExcelDatasetTypeProperties { +public final class ExcelDatasetTypeProperties implements JsonSerializable { /* * The location of the excel storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The sheet name of excel file. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sheetName") private Object sheetName; /* * The sheet index of excel file and default value is 0. Type: integer (or Expression with resultType integer) */ - @JsonProperty(value = "sheetIndex") private Object sheetIndex; /* * The partial data of one sheet. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "range") private Object range; /* * When used as input, treat the first row of data as headers. When used as output,write the headers into the output * as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "firstRowAsHeader") private Object firstRowAsHeader; /* * The data compression method used for the json dataset. */ - @JsonProperty(value = "compression") private DatasetCompression compression; /* * The null value string. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "nullValue") private Object nullValue; /** @@ -229,4 +226,59 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExcelDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("sheetName", this.sheetName); + jsonWriter.writeUntypedField("sheetIndex", this.sheetIndex); + jsonWriter.writeUntypedField("range", this.range); + jsonWriter.writeUntypedField("firstRowAsHeader", this.firstRowAsHeader); + jsonWriter.writeJsonField("compression", this.compression); + jsonWriter.writeUntypedField("nullValue", this.nullValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExcelDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExcelDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExcelDatasetTypeProperties. 
+ */ + public static ExcelDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExcelDatasetTypeProperties deserializedExcelDatasetTypeProperties = new ExcelDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("sheetName".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.sheetName = reader.readUntyped(); + } else if ("sheetIndex".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.sheetIndex = reader.readUntyped(); + } else if ("range".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.range = reader.readUntyped(); + } else if ("firstRowAsHeader".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.firstRowAsHeader = reader.readUntyped(); + } else if ("compression".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else if ("nullValue".equals(fieldName)) { + deserializedExcelDatasetTypeProperties.nullValue = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedExcelDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteDataFlowActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteDataFlowActivityTypeProperties.java index dd0b2200f263..0a054976cdc6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteDataFlowActivityTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteDataFlowActivityTypeProperties.java @@ -6,74 +6,69 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ContinuationSettingsReference; import com.azure.resourcemanager.datafactory.models.DataFlowReference; import com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo; import com.azure.resourcemanager.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Execute data flow activity properties. */ @Fluent -public class ExecuteDataFlowActivityTypeProperties { +public class ExecuteDataFlowActivityTypeProperties implements JsonSerializable { /* * Data flow reference. */ - @JsonProperty(value = "dataFlow", required = true) private DataFlowReference dataFlow; /* * Staging info for execute data flow activity. */ - @JsonProperty(value = "staging") private DataFlowStagingInfo staging; /* * The integration runtime reference. */ - @JsonProperty(value = "integrationRuntime") private IntegrationRuntimeReference integrationRuntime; /* * Continuation settings for execute data flow activity. */ - @JsonProperty(value = "continuationSettings") private ContinuationSettingsReference continuationSettings; /* * Compute properties for data flow activity. */ - @JsonProperty(value = "compute") private ExecuteDataFlowActivityTypePropertiesCompute compute; /* * Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. 
* Type: string (or Expression with resultType string) */ - @JsonProperty(value = "traceLevel") private Object traceLevel; /* * Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: * boolean (or Expression with resultType boolean) */ - @JsonProperty(value = "continueOnError") private Object continueOnError; /* * Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed * concurrently. Type: boolean (or Expression with resultType boolean) */ - @JsonProperty(value = "runConcurrently") private Object runConcurrently; /* * Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with * resultType integer) */ - @JsonProperty(value = "sourceStagingConcurrency") private Object sourceStagingConcurrency; /** @@ -300,4 +295,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecuteDataFlowActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("dataFlow", this.dataFlow); + jsonWriter.writeJsonField("staging", this.staging); + jsonWriter.writeJsonField("integrationRuntime", this.integrationRuntime); + jsonWriter.writeJsonField("continuationSettings", this.continuationSettings); + jsonWriter.writeJsonField("compute", this.compute); + jsonWriter.writeUntypedField("traceLevel", this.traceLevel); + jsonWriter.writeUntypedField("continueOnError", this.continueOnError); + jsonWriter.writeUntypedField("runConcurrently", this.runConcurrently); + jsonWriter.writeUntypedField("sourceStagingConcurrency", this.sourceStagingConcurrency); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteDataFlowActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ExecuteDataFlowActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecuteDataFlowActivityTypeProperties. + */ + public static ExecuteDataFlowActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteDataFlowActivityTypeProperties deserializedExecuteDataFlowActivityTypeProperties + = new ExecuteDataFlowActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataFlow".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.dataFlow = DataFlowReference.fromJson(reader); + } else if ("staging".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.staging = DataFlowStagingInfo.fromJson(reader); + } else if ("integrationRuntime".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.integrationRuntime + = IntegrationRuntimeReference.fromJson(reader); + } else if ("continuationSettings".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.continuationSettings + = ContinuationSettingsReference.fromJson(reader); + } else if ("compute".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.compute + = ExecuteDataFlowActivityTypePropertiesCompute.fromJson(reader); + } else if ("traceLevel".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.traceLevel = reader.readUntyped(); + } else if ("continueOnError".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.continueOnError = reader.readUntyped(); + } else if ("runConcurrently".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.runConcurrently = 
reader.readUntyped(); + } else if ("sourceStagingConcurrency".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypeProperties.sourceStagingConcurrency = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedExecuteDataFlowActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePipelineActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePipelineActivityTypeProperties.java index 5223c77abefb..ec10fcec2646 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePipelineActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePipelineActivityTypeProperties.java @@ -6,33 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Execute pipeline activity properties. */ @Fluent -public final class ExecutePipelineActivityTypeProperties { +public final class ExecutePipelineActivityTypeProperties + implements JsonSerializable { /* * Pipeline reference. */ - @JsonProperty(value = "pipeline", required = true) private PipelineReference pipeline; /* * Pipeline parameters. 
*/ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. */ - @JsonProperty(value = "waitOnCompletion") private Boolean waitOnCompletion; /** @@ -119,4 +119,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecutePipelineActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("pipeline", this.pipeline); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeBooleanField("waitOnCompletion", this.waitOnCompletion); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecutePipelineActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecutePipelineActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecutePipelineActivityTypeProperties. 
+ */ + public static ExecutePipelineActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecutePipelineActivityTypeProperties deserializedExecutePipelineActivityTypeProperties + = new ExecutePipelineActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("pipeline".equals(fieldName)) { + deserializedExecutePipelineActivityTypeProperties.pipeline = PipelineReference.fromJson(reader); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedExecutePipelineActivityTypeProperties.parameters = parameters; + } else if ("waitOnCompletion".equals(fieldName)) { + deserializedExecutePipelineActivityTypeProperties.waitOnCompletion + = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedExecutePipelineActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePowerQueryActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePowerQueryActivityTypeProperties.java index b4e99ed600bb..33475bf2c7ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePowerQueryActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecutePowerQueryActivityTypeProperties.java @@ -5,6 +5,9 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.ContinuationSettingsReference; import com.azure.resourcemanager.datafactory.models.DataFlowReference; import com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo; @@ -12,8 +15,7 @@ import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; import com.azure.resourcemanager.datafactory.models.PowerQuerySink; import com.azure.resourcemanager.datafactory.models.PowerQuerySinkMapping; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -25,14 +27,11 @@ public final class ExecutePowerQueryActivityTypeProperties extends ExecuteDataFl /* * (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. */ - @JsonProperty(value = "sinks") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map sinks; /* * List of mapping for Power Query mashup query to sink dataset(s). 
*/ - @JsonProperty(value = "queries") private List queries; /** @@ -185,4 +184,81 @@ public void validate() { queries().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("dataFlow", dataFlow()); + jsonWriter.writeJsonField("staging", staging()); + jsonWriter.writeJsonField("integrationRuntime", integrationRuntime()); + jsonWriter.writeJsonField("continuationSettings", continuationSettings()); + jsonWriter.writeJsonField("compute", compute()); + jsonWriter.writeUntypedField("traceLevel", traceLevel()); + jsonWriter.writeUntypedField("continueOnError", continueOnError()); + jsonWriter.writeUntypedField("runConcurrently", runConcurrently()); + jsonWriter.writeUntypedField("sourceStagingConcurrency", sourceStagingConcurrency()); + jsonWriter.writeMapField("sinks", this.sinks, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("queries", this.queries, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecutePowerQueryActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecutePowerQueryActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecutePowerQueryActivityTypeProperties. 
+ */ + public static ExecutePowerQueryActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecutePowerQueryActivityTypeProperties deserializedExecutePowerQueryActivityTypeProperties + = new ExecutePowerQueryActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataFlow".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withDataFlow(DataFlowReference.fromJson(reader)); + } else if ("staging".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withStaging(DataFlowStagingInfo.fromJson(reader)); + } else if ("integrationRuntime".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withIntegrationRuntime(IntegrationRuntimeReference.fromJson(reader)); + } else if ("continuationSettings".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withContinuationSettings(ContinuationSettingsReference.fromJson(reader)); + } else if ("compute".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withCompute(ExecuteDataFlowActivityTypePropertiesCompute.fromJson(reader)); + } else if ("traceLevel".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties.withTraceLevel(reader.readUntyped()); + } else if ("continueOnError".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties.withContinueOnError(reader.readUntyped()); + } else if ("runConcurrently".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties.withRunConcurrently(reader.readUntyped()); + } else if ("sourceStagingConcurrency".equals(fieldName)) { + deserializedExecutePowerQueryActivityTypeProperties + .withSourceStagingConcurrency(reader.readUntyped()); + } else if ("sinks".equals(fieldName)) { + Map sinks = reader.readMap(reader1 -> PowerQuerySink.fromJson(reader1)); + 
deserializedExecutePowerQueryActivityTypeProperties.sinks = sinks; + } else if ("queries".equals(fieldName)) { + List queries + = reader.readArray(reader1 -> PowerQuerySinkMapping.fromJson(reader1)); + deserializedExecutePowerQueryActivityTypeProperties.queries = queries; + } else { + reader.skipChildren(); + } + } + + return deserializedExecutePowerQueryActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteSsisPackageActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteSsisPackageActivityTypeProperties.java index 55e3a97e64f5..7891fbde1933 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteSsisPackageActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExecuteSsisPackageActivityTypeProperties.java @@ -6,97 +6,84 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; import com.azure.resourcemanager.datafactory.models.SsisExecutionCredential; import com.azure.resourcemanager.datafactory.models.SsisExecutionParameter; import com.azure.resourcemanager.datafactory.models.SsisLogLocation; import com.azure.resourcemanager.datafactory.models.SsisPackageLocation; import com.azure.resourcemanager.datafactory.models.SsisPropertyOverride; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Execute SSIS package 
activity properties. */ @Fluent -public final class ExecuteSsisPackageActivityTypeProperties { +public final class ExecuteSsisPackageActivityTypeProperties + implements JsonSerializable { /* * SSIS package location. */ - @JsonProperty(value = "packageLocation", required = true) private SsisPackageLocation packageLocation; /* * Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "runtime") private Object runtime; /* * The logging level of SSIS package execution. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "loggingLevel") private Object loggingLevel; /* * The environment path to execute the SSIS package. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "environmentPath") private Object environmentPath; /* * The package execution credential. */ - @JsonProperty(value = "executionCredential") private SsisExecutionCredential executionCredential; /* * The integration runtime reference. */ - @JsonProperty(value = "connectVia", required = true) private IntegrationRuntimeReference connectVia; /* * The project level parameters to execute the SSIS package. */ - @JsonProperty(value = "projectParameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map projectParameters; /* * The package level parameters to execute the SSIS package. */ - @JsonProperty(value = "packageParameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map packageParameters; /* * The project level connection managers to execute the SSIS package. */ - @JsonProperty(value = "projectConnectionManagers") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map> projectConnectionManagers; /* * The package level connection managers to execute the SSIS package. 
*/ - @JsonProperty(value = "packageConnectionManagers") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map> packageConnectionManagers; /* * The property overrides to execute the SSIS package. */ - @JsonProperty(value = "propertyOverrides") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map propertyOverrides; /* * SSIS package execution log location. */ - @JsonProperty(value = "logLocation") private SsisLogLocation logLocation; /** @@ -429,4 +416,95 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecuteSsisPackageActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("packageLocation", this.packageLocation); + jsonWriter.writeJsonField("connectVia", this.connectVia); + jsonWriter.writeUntypedField("runtime", this.runtime); + jsonWriter.writeUntypedField("loggingLevel", this.loggingLevel); + jsonWriter.writeUntypedField("environmentPath", this.environmentPath); + jsonWriter.writeJsonField("executionCredential", this.executionCredential); + jsonWriter.writeMapField("projectParameters", this.projectParameters, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("packageParameters", this.packageParameters, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("projectConnectionManagers", this.projectConnectionManagers, + (writer, element) -> writer.writeMap(element, (writer1, element1) -> writer1.writeJson(element1))); + jsonWriter.writeMapField("packageConnectionManagers", this.packageConnectionManagers, + (writer, element) -> writer.writeMap(element, (writer1, element1) -> writer1.writeJson(element1))); + jsonWriter.writeMapField("propertyOverrides", this.propertyOverrides, + (writer, element) -> writer.writeJson(element)); + 
jsonWriter.writeJsonField("logLocation", this.logLocation); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteSsisPackageActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecuteSsisPackageActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecuteSsisPackageActivityTypeProperties. + */ + public static ExecuteSsisPackageActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteSsisPackageActivityTypeProperties deserializedExecuteSsisPackageActivityTypeProperties + = new ExecuteSsisPackageActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("packageLocation".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.packageLocation + = SsisPackageLocation.fromJson(reader); + } else if ("connectVia".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.connectVia + = IntegrationRuntimeReference.fromJson(reader); + } else if ("runtime".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.runtime = reader.readUntyped(); + } else if ("loggingLevel".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.loggingLevel = reader.readUntyped(); + } else if ("environmentPath".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.environmentPath = reader.readUntyped(); + } else if ("executionCredential".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.executionCredential + = SsisExecutionCredential.fromJson(reader); + } else if 
("projectParameters".equals(fieldName)) { + Map projectParameters + = reader.readMap(reader1 -> SsisExecutionParameter.fromJson(reader1)); + deserializedExecuteSsisPackageActivityTypeProperties.projectParameters = projectParameters; + } else if ("packageParameters".equals(fieldName)) { + Map packageParameters + = reader.readMap(reader1 -> SsisExecutionParameter.fromJson(reader1)); + deserializedExecuteSsisPackageActivityTypeProperties.packageParameters = packageParameters; + } else if ("projectConnectionManagers".equals(fieldName)) { + Map> projectConnectionManagers = reader + .readMap(reader1 -> reader1.readMap(reader2 -> SsisExecutionParameter.fromJson(reader2))); + deserializedExecuteSsisPackageActivityTypeProperties.projectConnectionManagers + = projectConnectionManagers; + } else if ("packageConnectionManagers".equals(fieldName)) { + Map> packageConnectionManagers = reader + .readMap(reader1 -> reader1.readMap(reader2 -> SsisExecutionParameter.fromJson(reader2))); + deserializedExecuteSsisPackageActivityTypeProperties.packageConnectionManagers + = packageConnectionManagers; + } else if ("propertyOverrides".equals(fieldName)) { + Map propertyOverrides + = reader.readMap(reader1 -> SsisPropertyOverride.fromJson(reader1)); + deserializedExecuteSsisPackageActivityTypeProperties.propertyOverrides = propertyOverrides; + } else if ("logLocation".equals(fieldName)) { + deserializedExecuteSsisPackageActivityTypeProperties.logLocation = SsisLogLocation.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedExecuteSsisPackageActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlBatchResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlBatchResponseInner.java index a2a510bc66aa..13af875ecafa 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlBatchResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlBatchResponseInner.java @@ -6,18 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A list of exposure control feature values. */ @Fluent -public final class ExposureControlBatchResponseInner { +public final class ExposureControlBatchResponseInner implements JsonSerializable { /* * List of exposure control feature values. */ - @JsonProperty(value = "exposureControlResponses", required = true) private List exposureControlResponses; /** @@ -63,4 +66,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExposureControlBatchResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("exposureControlResponses", this.exposureControlResponses, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExposureControlBatchResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExposureControlBatchResponseInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the ExposureControlBatchResponseInner. + */ + public static ExposureControlBatchResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExposureControlBatchResponseInner deserializedExposureControlBatchResponseInner + = new ExposureControlBatchResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("exposureControlResponses".equals(fieldName)) { + List exposureControlResponses + = reader.readArray(reader1 -> ExposureControlResponseInner.fromJson(reader1)); + deserializedExposureControlBatchResponseInner.exposureControlResponses = exposureControlResponses; + } else { + reader.skipChildren(); + } + } + + return deserializedExposureControlBatchResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlResponseInner.java index de31b1a9cdc3..d915675f2e52 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ExposureControlResponseInner.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The exposure control response. 
*/ @Immutable -public final class ExposureControlResponseInner { +public final class ExposureControlResponseInner implements JsonSerializable { /* * The feature name. */ - @JsonProperty(value = "featureName", access = JsonProperty.Access.WRITE_ONLY) private String featureName; /* * The feature value. */ - @JsonProperty(value = "value", access = JsonProperty.Access.WRITE_ONLY) private String value; /** @@ -55,4 +57,41 @@ public String value() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExposureControlResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExposureControlResponseInner if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ExposureControlResponseInner. 
+ */ + public static ExposureControlResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExposureControlResponseInner deserializedExposureControlResponseInner = new ExposureControlResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("featureName".equals(fieldName)) { + deserializedExposureControlResponseInner.featureName = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedExposureControlResponseInner.value = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedExposureControlResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryInner.java index cc25f89eb6be..3357e534d23b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryInner.java @@ -6,16 +6,16 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.Resource; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.EncryptionConfiguration; import com.azure.resourcemanager.datafactory.models.FactoryIdentity; import com.azure.resourcemanager.datafactory.models.FactoryRepoConfiguration; import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; import com.azure.resourcemanager.datafactory.models.PublicNetworkAccess; import com.azure.resourcemanager.datafactory.models.PurviewConfiguration; -import 
com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.Map; @@ -28,27 +28,38 @@ public final class FactoryInner extends Resource { /* * Managed service identity of the factory. */ - @JsonProperty(value = "identity") private FactoryIdentity identity; /* * Properties of the factory. */ - @JsonProperty(value = "properties") private FactoryProperties innerProperties; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "eTag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /* * Factory resource type. */ - @JsonIgnore private Map additionalProperties; + /* + * Fully qualified resource Id for the resource. + */ + private String id; + + /* + * The name of the resource. + */ + private String name; + + /* + * The type of the resource. + */ + private String type; + /** * Creates an instance of FactoryInner class. */ @@ -98,7 +109,6 @@ public String etag() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -114,12 +124,34 @@ public FactoryInner withAdditionalProperties(Map additionalPrope return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); + /** + * Get the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + @Override + public String id() { + return this.id; + } + + /** + * Get the name property: The name of the resource. + * + * @return the name value. + */ + @Override + public String name() { + return this.name; + } + + /** + * Get the type property: The type of the resource. 
+ * + * @return the type value. + */ + @Override + public String type() { + return this.type; } /** @@ -295,4 +327,70 @@ public void validate() { innerProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("location", location()); + jsonWriter.writeMapField("tags", tags(), (writer, element) -> writer.writeString(element)); + jsonWriter.writeJsonField("identity", this.identity); + jsonWriter.writeJsonField("properties", this.innerProperties); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryInner if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryInner. 
+ */ + public static FactoryInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryInner deserializedFactoryInner = new FactoryInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedFactoryInner.id = reader.getString(); + } else if ("name".equals(fieldName)) { + deserializedFactoryInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedFactoryInner.type = reader.getString(); + } else if ("location".equals(fieldName)) { + deserializedFactoryInner.withLocation(reader.getString()); + } else if ("tags".equals(fieldName)) { + Map tags = reader.readMap(reader1 -> reader1.getString()); + deserializedFactoryInner.withTags(tags); + } else if ("identity".equals(fieldName)) { + deserializedFactoryInner.identity = FactoryIdentity.fromJson(reader); + } else if ("properties".equals(fieldName)) { + deserializedFactoryInner.innerProperties = FactoryProperties.fromJson(reader); + } else if ("eTag".equals(fieldName)) { + deserializedFactoryInner.etag = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFactoryInner.additionalProperties = additionalProperties; + + return deserializedFactoryInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryProperties.java index ae294b942c75..8989e95097a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryProperties.java @@ -5,13 +5,17 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.EncryptionConfiguration; import com.azure.resourcemanager.datafactory.models.FactoryRepoConfiguration; import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; import com.azure.resourcemanager.datafactory.models.PublicNetworkAccess; import com.azure.resourcemanager.datafactory.models.PurviewConfiguration; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.Map; @@ -19,54 +23,45 @@ * Factory resource properties. */ @Fluent -public final class FactoryProperties { +public final class FactoryProperties implements JsonSerializable { /* * Factory provisioning state, example Succeeded. */ - @JsonProperty(value = "provisioningState", access = JsonProperty.Access.WRITE_ONLY) private String provisioningState; /* * Time the factory was created in ISO8601 format. */ - @JsonProperty(value = "createTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime createTime; /* * Version of the factory. */ - @JsonProperty(value = "version", access = JsonProperty.Access.WRITE_ONLY) private String version; /* * Purview information of the factory. */ - @JsonProperty(value = "purviewConfiguration") private PurviewConfiguration purviewConfiguration; /* * Git repo information of the factory. */ - @JsonProperty(value = "repoConfiguration") private FactoryRepoConfiguration repoConfiguration; /* * List of parameters for factory. 
*/ - @JsonProperty(value = "globalParameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map globalParameters; /* * Properties to enable Customer Managed Key for the factory. */ - @JsonProperty(value = "encryption") private EncryptionConfiguration encryption; /* * Whether or not public network access is allowed for the data factory. */ - @JsonProperty(value = "publicNetworkAccess") private PublicNetworkAccess publicNetworkAccess; /** @@ -225,4 +220,64 @@ public void validate() { encryption().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("purviewConfiguration", this.purviewConfiguration); + jsonWriter.writeJsonField("repoConfiguration", this.repoConfiguration); + jsonWriter.writeMapField("globalParameters", this.globalParameters, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("encryption", this.encryption); + jsonWriter.writeStringField("publicNetworkAccess", + this.publicNetworkAccess == null ? null : this.publicNetworkAccess.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryProperties if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FactoryProperties. 
+ */ + public static FactoryProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryProperties deserializedFactoryProperties = new FactoryProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("provisioningState".equals(fieldName)) { + deserializedFactoryProperties.provisioningState = reader.getString(); + } else if ("createTime".equals(fieldName)) { + deserializedFactoryProperties.createTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("version".equals(fieldName)) { + deserializedFactoryProperties.version = reader.getString(); + } else if ("purviewConfiguration".equals(fieldName)) { + deserializedFactoryProperties.purviewConfiguration = PurviewConfiguration.fromJson(reader); + } else if ("repoConfiguration".equals(fieldName)) { + deserializedFactoryProperties.repoConfiguration = FactoryRepoConfiguration.fromJson(reader); + } else if ("globalParameters".equals(fieldName)) { + Map globalParameters + = reader.readMap(reader1 -> GlobalParameterSpecification.fromJson(reader1)); + deserializedFactoryProperties.globalParameters = globalParameters; + } else if ("encryption".equals(fieldName)) { + deserializedFactoryProperties.encryption = EncryptionConfiguration.fromJson(reader); + } else if ("publicNetworkAccess".equals(fieldName)) { + deserializedFactoryProperties.publicNetworkAccess + = PublicNetworkAccess.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryUpdateProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryUpdateProperties.java index 
d655ebe35d54..85bf9d2dd078 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryUpdateProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FactoryUpdateProperties.java @@ -5,18 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PublicNetworkAccess; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Factory update resource properties. */ @Fluent -public final class FactoryUpdateProperties { +public final class FactoryUpdateProperties implements JsonSerializable { /* * Whether or not public network access is allowed for the data factory. */ - @JsonProperty(value = "publicNetworkAccess") private PublicNetworkAccess publicNetworkAccess; /** @@ -52,4 +55,42 @@ public FactoryUpdateProperties withPublicNetworkAccess(PublicNetworkAccess publi */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("publicNetworkAccess", + this.publicNetworkAccess == null ? null : this.publicNetworkAccess.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryUpdateProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryUpdateProperties if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the FactoryUpdateProperties. 
+ */ + public static FactoryUpdateProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryUpdateProperties deserializedFactoryUpdateProperties = new FactoryUpdateProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("publicNetworkAccess".equals(fieldName)) { + deserializedFactoryUpdateProperties.publicNetworkAccess + = PublicNetworkAccess.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryUpdateProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java index cee932969cf9..74da9bfc3f2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FailActivityTypeProperties.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Fail activity properties. */ @Fluent -public final class FailActivityTypeProperties { +public final class FailActivityTypeProperties implements JsonSerializable { /* * The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non * empty/blank string at runtime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "message", required = true) private Object message; /* * The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated * to a non empty/blank string at runtime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "errorCode", required = true) private Object errorCode; /** @@ -98,4 +100,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FailActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("message", this.message); + jsonWriter.writeUntypedField("errorCode", this.errorCode); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FailActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FailActivityTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FailActivityTypeProperties. 
+ */ + public static FailActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FailActivityTypeProperties deserializedFailActivityTypeProperties = new FailActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("message".equals(fieldName)) { + deserializedFailActivityTypeProperties.message = reader.readUntyped(); + } else if ("errorCode".equals(fieldName)) { + deserializedFailActivityTypeProperties.errorCode = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedFailActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java index a1dd4dec56cc..8a0de38071ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileServerLinkedServiceTypeProperties.java @@ -6,37 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * File system linked service properties. 
*/ @Fluent -public final class FileServerLinkedServiceTypeProperties { +public final class FileServerLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name of the server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "host", required = true) private Object host; /* * User ID to logon the server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userId") private Object userId; /* * Password to logon the server. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -144,4 +145,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FileServerLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("userId", this.userId); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileServerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileServerLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FileServerLinkedServiceTypeProperties. 
+ */ + public static FileServerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileServerLinkedServiceTypeProperties deserializedFileServerLinkedServiceTypeProperties + = new FileServerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedFileServerLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("userId".equals(fieldName)) { + deserializedFileServerLinkedServiceTypeProperties.userId = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedFileServerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedFileServerLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedFileServerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java index 43eb8408e972..6d5407470501 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FileShareDatasetTypeProperties.java @@ -5,56 +5,53 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * On-premises file system dataset properties. */ @Fluent -public final class FileShareDatasetTypeProperties { +public final class FileShareDatasetTypeProperties implements JsonSerializable { /* * The path of the on-premises file system. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * The name of the on-premises file system. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileName") private Object fileName; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * The format of the files. */ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "fileFilter") private Object fileFilter; /* * The data compression method used for the file system. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -226,4 +223,59 @@ public void validate() { compression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeUntypedField("fileName", this.fileName); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeUntypedField("fileFilter", this.fileFilter); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileShareDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileShareDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the FileShareDatasetTypeProperties. 
+ */ + public static FileShareDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileShareDatasetTypeProperties deserializedFileShareDatasetTypeProperties + = new FileShareDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.folderPath = reader.readUntyped(); + } else if ("fileName".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.fileName = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("fileFilter".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.fileFilter = reader.readUntyped(); + } else if ("compression".equals(fieldName)) { + deserializedFileShareDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFileShareDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java index 687079add0b1..6e8d3b673cc2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FilterActivityTypeProperties.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Expression; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Filter activity properties. */ @Fluent -public final class FilterActivityTypeProperties { +public final class FilterActivityTypeProperties implements JsonSerializable { /* * Input array on which filter should be applied. */ - @JsonProperty(value = "items", required = true) private Expression items; /* * Condition to be used for filtering the input. */ - @JsonProperty(value = "condition", required = true) private Expression condition; /** @@ -95,4 +97,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FilterActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("items", this.items); + jsonWriter.writeJsonField("condition", this.condition); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FilterActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FilterActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FilterActivityTypeProperties. 
+ */ + public static FilterActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FilterActivityTypeProperties deserializedFilterActivityTypeProperties = new FilterActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("items".equals(fieldName)) { + deserializedFilterActivityTypeProperties.items = Expression.fromJson(reader); + } else if ("condition".equals(fieldName)) { + deserializedFilterActivityTypeProperties.condition = Expression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFilterActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FlowletTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FlowletTypeProperties.java index 97239908f77f..10f67a953cde 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FlowletTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FlowletTypeProperties.java @@ -5,45 +5,44 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DataFlowSink; import com.azure.resourcemanager.datafactory.models.DataFlowSource; import com.azure.resourcemanager.datafactory.models.Transformation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Flowlet type properties. 
*/ @Fluent -public final class FlowletTypeProperties { +public final class FlowletTypeProperties implements JsonSerializable { /* * List of sources in Flowlet. */ - @JsonProperty(value = "sources") private List sources; /* * List of sinks in Flowlet. */ - @JsonProperty(value = "sinks") private List sinks; /* * List of transformations in Flowlet. */ - @JsonProperty(value = "transformations") private List transformations; /* * Flowlet script. */ - @JsonProperty(value = "script") private String script; /* * Flowlet script lines. */ - @JsonProperty(value = "scriptLines") private List scriptLines; /** @@ -168,4 +167,58 @@ public void validate() { transformations().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sources", this.sources, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("sinks", this.sinks, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("transformations", this.transformations, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("script", this.script); + jsonWriter.writeArrayField("scriptLines", this.scriptLines, (writer, element) -> writer.writeString(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FlowletTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FlowletTypeProperties if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the FlowletTypeProperties. 
+ */ + public static FlowletTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FlowletTypeProperties deserializedFlowletTypeProperties = new FlowletTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sources".equals(fieldName)) { + List sources = reader.readArray(reader1 -> DataFlowSource.fromJson(reader1)); + deserializedFlowletTypeProperties.sources = sources; + } else if ("sinks".equals(fieldName)) { + List sinks = reader.readArray(reader1 -> DataFlowSink.fromJson(reader1)); + deserializedFlowletTypeProperties.sinks = sinks; + } else if ("transformations".equals(fieldName)) { + List transformations + = reader.readArray(reader1 -> Transformation.fromJson(reader1)); + deserializedFlowletTypeProperties.transformations = transformations; + } else if ("script".equals(fieldName)) { + deserializedFlowletTypeProperties.script = reader.getString(); + } else if ("scriptLines".equals(fieldName)) { + List scriptLines = reader.readArray(reader1 -> reader1.getString()); + deserializedFlowletTypeProperties.scriptLines = scriptLines; + } else { + reader.skipChildren(); + } + } + + return deserializedFlowletTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java index 0311ed0a8c65..a38a671aab3a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ForEachActivityTypeProperties.java @@ -6,38 +6,38 @@ import 
com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.Expression; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * ForEach activity properties. */ @Fluent -public final class ForEachActivityTypeProperties { +public final class ForEachActivityTypeProperties implements JsonSerializable { /* * Should the loop be executed in sequence or in parallel (max 50) */ - @JsonProperty(value = "isSequential") private Boolean isSequential; /* * Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). */ - @JsonProperty(value = "batchCount") private Integer batchCount; /* * Collection to iterate. */ - @JsonProperty(value = "items", required = true) private Expression items; /* * List of activities to execute . */ - @JsonProperty(value = "activities", required = true) private List activities; /** @@ -151,4 +151,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ForEachActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("items", this.items); + jsonWriter.writeArrayField("activities", this.activities, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeBooleanField("isSequential", this.isSequential); + jsonWriter.writeNumberField("batchCount", this.batchCount); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ForEachActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ForEachActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ForEachActivityTypeProperties. + */ + public static ForEachActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ForEachActivityTypeProperties deserializedForEachActivityTypeProperties + = new ForEachActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("items".equals(fieldName)) { + deserializedForEachActivityTypeProperties.items = Expression.fromJson(reader); + } else if ("activities".equals(fieldName)) { + List activities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedForEachActivityTypeProperties.activities = activities; + } else if ("isSequential".equals(fieldName)) { + deserializedForEachActivityTypeProperties.isSequential = reader.getNullable(JsonReader::getBoolean); + } else if ("batchCount".equals(fieldName)) { + deserializedForEachActivityTypeProperties.batchCount = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedForEachActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java index 5fdda137e169..c21b2d740e19 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/FtpServerLinkedServiceTypeProperties.java @@ -6,65 +6,62 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.FtpAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. */ @Fluent -public final class FtpServerLinkedServiceTypeProperties { +public final class FtpServerLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name of the FTP server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer * (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * The authentication type to be used to connect to the FTP server. */ - @JsonProperty(value = "authenticationType") private FtpAuthenticationType authenticationType; /* * Username to logon the FTP server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to logon the FTP server. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * If true, connect to the FTP server over SSL/TLS channel. Default value is true. 
Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* * If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; /** @@ -259,4 +256,66 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FtpServerLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("enableServerCertificateValidation", this.enableServerCertificateValidation); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FtpServerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FtpServerLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FtpServerLinkedServiceTypeProperties. 
+ */ + public static FtpServerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FtpServerLinkedServiceTypeProperties deserializedFtpServerLinkedServiceTypeProperties + = new FtpServerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.authenticationType + = FtpAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("enableSsl".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("enableServerCertificateValidation".equals(fieldName)) { + deserializedFtpServerLinkedServiceTypeProperties.enableServerCertificateValidation + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedFtpServerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GenericDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GenericDatasetTypeProperties.java index 
00022f5ec16b..9ee221f7670e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GenericDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GenericDatasetTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Properties specific to this dataset type. */ @Fluent -public final class GenericDatasetTypeProperties { +public final class GenericDatasetTypeProperties implements JsonSerializable { /* * The table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +54,40 @@ public GenericDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GenericDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GenericDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GenericDatasetTypeProperties. 
+ */ + public static GenericDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GenericDatasetTypeProperties deserializedGenericDatasetTypeProperties = new GenericDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedGenericDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedGenericDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java index 0ee85a73e38a..6fe308e5a1d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GetMetadataActivityTypeProperties.java @@ -6,39 +6,39 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetReference; import com.azure.resourcemanager.datafactory.models.FormatReadSettings; import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * GetMetadata activity properties. 
*/ @Fluent -public final class GetMetadataActivityTypeProperties { +public final class GetMetadataActivityTypeProperties implements JsonSerializable { /* * GetMetadata activity dataset reference. */ - @JsonProperty(value = "dataset", required = true) private DatasetReference dataset; /* * Fields of metadata to get from dataset. */ - @JsonProperty(value = "fieldList") private List fieldList; /* * GetMetadata activity store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * GetMetadata activity format settings. */ - @JsonProperty(value = "formatSettings") private FormatReadSettings formatSettings; /** @@ -149,4 +149,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GetMetadataActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("dataset", this.dataset); + jsonWriter.writeArrayField("fieldList", this.fieldList, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GetMetadataActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GetMetadataActivityTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GetMetadataActivityTypeProperties. 
+ */ + public static GetMetadataActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GetMetadataActivityTypeProperties deserializedGetMetadataActivityTypeProperties + = new GetMetadataActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataset".equals(fieldName)) { + deserializedGetMetadataActivityTypeProperties.dataset = DatasetReference.fromJson(reader); + } else if ("fieldList".equals(fieldName)) { + List fieldList = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGetMetadataActivityTypeProperties.fieldList = fieldList; + } else if ("storeSettings".equals(fieldName)) { + deserializedGetMetadataActivityTypeProperties.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedGetMetadataActivityTypeProperties.formatSettings = FormatReadSettings.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedGetMetadataActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GitHubAccessTokenResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GitHubAccessTokenResponseInner.java index 29db9d1b8500..435000461da0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GitHubAccessTokenResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GitHubAccessTokenResponseInner.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Get GitHub access token response definition. */ @Fluent -public final class GitHubAccessTokenResponseInner { +public final class GitHubAccessTokenResponseInner implements JsonSerializable { /* * GitHub access token. */ - @JsonProperty(value = "gitHubAccessToken") private String gitHubAccessToken; /** @@ -51,4 +54,41 @@ public GitHubAccessTokenResponseInner withGitHubAccessToken(String gitHubAccessT */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("gitHubAccessToken", this.gitHubAccessToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GitHubAccessTokenResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GitHubAccessTokenResponseInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GitHubAccessTokenResponseInner. 
+ */ + public static GitHubAccessTokenResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GitHubAccessTokenResponseInner deserializedGitHubAccessTokenResponseInner + = new GitHubAccessTokenResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("gitHubAccessToken".equals(fieldName)) { + deserializedGitHubAccessTokenResponseInner.gitHubAccessToken = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGitHubAccessTokenResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java index 8bffe96175ce..3644122f3dec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GlobalParameterResourceInner.java @@ -7,9 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** @@ -20,26 +22,21 @@ public final class GlobalParameterResourceInner extends SubResource { /* * Properties of the global parameter. 
*/ - @JsonProperty(value = "properties", required = true) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map<String, GlobalParameterSpecification> properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -124,4 +121,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GlobalParameterResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeMapField("properties", this.properties, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GlobalParameterResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GlobalParameterResourceInner if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GlobalParameterResourceInner. 
+ */ + public static GlobalParameterResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GlobalParameterResourceInner deserializedGlobalParameterResourceInner = new GlobalParameterResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedGlobalParameterResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + Map<String, GlobalParameterSpecification> properties + = reader.readMap(reader1 -> GlobalParameterSpecification.fromJson(reader1)); + deserializedGlobalParameterResourceInner.properties = properties; + } else if ("name".equals(fieldName)) { + deserializedGlobalParameterResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedGlobalParameterResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedGlobalParameterResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGlobalParameterResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java index c0f12b3a7ce7..98d4f3d02e6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleAdWordsLinkedServiceTypeProperties.java @@ -5,73 +5,69 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.GoogleAdWordsAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Google AdWords service linked service properties. */ @Fluent -public final class GoogleAdWordsLinkedServiceTypeProperties { +public final class GoogleAdWordsLinkedServiceTypeProperties + implements JsonSerializable<GoogleAdWordsLinkedServiceTypeProperties> { /* * (Deprecated) Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the * linked service. Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The Client customer ID of the AdWords account that you want to fetch report data for. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "clientCustomerID") private Object clientCustomerId; /* * The developer token associated with the manager account that you use to grant access to the AdWords API. */ - @JsonProperty(value = "developerToken") private SecretBase developerToken; /* * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on * self-hosted IR. */ - @JsonProperty(value = "authenticationType") private GoogleAdWordsAuthenticationType authenticationType; /* * The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. */ - @JsonProperty(value = "refreshToken") private SecretBase refreshToken; /* * The client id of the google application used to acquire the refresh token. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret of the google application used to acquire the refresh token. 
*/ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "email") private Object email; /* * (Deprecated) The full path to the .p12 key file that is used to authenticate the service account email address * and can only be used on self-hosted IR. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "keyFilePath") private Object keyFilePath; /* @@ -79,28 +75,24 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { * connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the * cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * (Deprecated) Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. * The default value is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * The private key that is used to authenticate the service account email address and can only be used on * self-hosted IR. */ - @JsonProperty(value = "privateKey") private SecretBase privateKey; /* * The customer ID of the Google Ads Manager account through which you want to fetch report data of specific * Customer. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "loginCustomerID") private Object loginCustomerId; /* @@ -108,7 +100,6 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { * https://developers.google.com/google-ads/api/docs/release-notes. Type: string (or Expression with resultType * string). 
*/ - @JsonProperty(value = "googleAdsApiVersion") private Object googleAdsApiVersion; /* @@ -116,14 +107,12 @@ public final class GoogleAdWordsLinkedServiceTypeProperties { * Do not set this to true unless you want to keep backward compatibility with legacy driver's data type mappings. * Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "supportLegacyDataTypes") private Object supportLegacyDataTypes; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -514,4 +503,88 @@ public void validate() { privateKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("clientCustomerID", this.clientCustomerId); + jsonWriter.writeJsonField("developerToken", this.developerToken); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeJsonField("refreshToken", this.refreshToken); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("email", this.email); + jsonWriter.writeUntypedField("keyFilePath", this.keyFilePath); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeJsonField("privateKey", this.privateKey); + jsonWriter.writeUntypedField("loginCustomerID", this.loginCustomerId); + jsonWriter.writeUntypedField("googleAdsApiVersion", this.googleAdsApiVersion); + jsonWriter.writeUntypedField("supportLegacyDataTypes", this.supportLegacyDataTypes); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleAdWordsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleAdWordsLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleAdWordsLinkedServiceTypeProperties. 
+ */ + public static GoogleAdWordsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleAdWordsLinkedServiceTypeProperties deserializedGoogleAdWordsLinkedServiceTypeProperties + = new GoogleAdWordsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("clientCustomerID".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.clientCustomerId = reader.readUntyped(); + } else if ("developerToken".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.developerToken = SecretBase.fromJson(reader); + } else if ("authenticationType".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.authenticationType + = GoogleAdWordsAuthenticationType.fromString(reader.getString()); + } else if ("refreshToken".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.refreshToken = SecretBase.fromJson(reader); + } else if ("clientId".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("email".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.email = reader.readUntyped(); + } else if ("keyFilePath".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.keyFilePath = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + 
deserializedGoogleAdWordsLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("privateKey".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.privateKey = SecretBase.fromJson(reader); + } else if ("loginCustomerID".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.loginCustomerId = reader.readUntyped(); + } else if ("googleAdsApiVersion".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.googleAdsApiVersion = reader.readUntyped(); + } else if ("supportLegacyDataTypes".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.supportLegacyDataTypes = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGoogleAdWordsLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleAdWordsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryDatasetTypeProperties.java index be09351ddaa7..22bf53cf6ee7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryDatasetTypeProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; 
+import java.io.IOException; /** * Google BigQuery Dataset Properties. */ @Fluent -public final class GoogleBigQueryDatasetTypeProperties { +public final class GoogleBigQueryDatasetTypeProperties + implements JsonSerializable<GoogleBigQueryDatasetTypeProperties> { /* * This property will be retired. Please consider using database + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Google BigQuery. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The database name of the Google BigQuery. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "dataset") private Object dataset; /** @@ -109,4 +111,47 @@ public GoogleBigQueryDatasetTypeProperties withDataset(Object dataset) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("dataset", this.dataset); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleBigQueryDatasetTypeProperties. 
+ */ + public static GoogleBigQueryDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryDatasetTypeProperties deserializedGoogleBigQueryDatasetTypeProperties + = new GoogleBigQueryDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedGoogleBigQueryDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedGoogleBigQueryDatasetTypeProperties.table = reader.readUntyped(); + } else if ("dataset".equals(fieldName)) { + deserializedGoogleBigQueryDatasetTypeProperties.dataset = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleBigQueryDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java index 547367f0dd9c..266d11110cad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryLinkedServiceTypeProperties.java @@ -6,26 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.GoogleBigQueryAuthenticationType; import 
com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Google BigQuery service linked service properties. */ @Fluent -public final class GoogleBigQueryLinkedServiceTypeProperties { +public final class GoogleBigQueryLinkedServiceTypeProperties + implements JsonSerializable<GoogleBigQueryLinkedServiceTypeProperties> { /* * The default BigQuery project to query against. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "project", required = true) private Object project; /* * A comma-separated list of public BigQuery projects to access. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "additionalProjects") private Object additionalProjects; /* @@ -33,47 +36,40 @@ public final class GoogleBigQueryLinkedServiceTypeProperties { * combine BigQuery data with data from Google Drive. The default value is false. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "requestGoogleDriveScope") private Object requestGoogleDriveScope; /* * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on * self-hosted IR. */ - @JsonProperty(value = "authenticationType", required = true) private GoogleBigQueryAuthenticationType authenticationType; /* * The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. */ - @JsonProperty(value = "refreshToken") private SecretBase refreshToken; /* * The client id of the google application used to acquire the refresh token. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret of the google application used to acquire the refresh token. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. 
Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "email") private Object email; /* * The full path to the .p12 key file that is used to authenticate the service account email address and can only be * used on self-hosted IR. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "keyFilePath") private Object keyFilePath; /* @@ -81,21 +77,18 @@ public final class GoogleBigQueryLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false.Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -396,4 +389,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleBigQueryLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("project", this.project); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("additionalProjects", this.additionalProjects); + jsonWriter.writeUntypedField("requestGoogleDriveScope", this.requestGoogleDriveScope); + jsonWriter.writeJsonField("refreshToken", this.refreshToken); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("email", this.email); + jsonWriter.writeUntypedField("keyFilePath", this.keyFilePath); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryLinkedServiceTypeProperties. 
+ */ + public static GoogleBigQueryLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryLinkedServiceTypeProperties deserializedGoogleBigQueryLinkedServiceTypeProperties + = new GoogleBigQueryLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("project".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.project = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.authenticationType + = GoogleBigQueryAuthenticationType.fromString(reader.getString()); + } else if ("additionalProjects".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.additionalProjects = reader.readUntyped(); + } else if ("requestGoogleDriveScope".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.requestGoogleDriveScope + = reader.readUntyped(); + } else if ("refreshToken".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.refreshToken = SecretBase.fromJson(reader); + } else if ("clientId".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("email".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.email = reader.readUntyped(); + } else if ("keyFilePath".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.keyFilePath = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + 
deserializedGoogleBigQueryLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGoogleBigQueryLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleBigQueryLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2DatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2DatasetTypeProperties.java index 09a9872a94b9..93b7a4a53e62 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2DatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2DatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Google BigQuery Dataset Properties. */ @Fluent -public final class GoogleBigQueryV2DatasetTypeProperties { +public final class GoogleBigQueryV2DatasetTypeProperties + implements JsonSerializable<GoogleBigQueryV2DatasetTypeProperties> { /* * The table name of the Google BigQuery. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The database name of the Google BigQuery. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "dataset") private Object dataset; /** @@ -81,4 +84,44 @@ public GoogleBigQueryV2DatasetTypeProperties withDataset(Object dataset) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("dataset", this.dataset); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryV2DatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryV2DatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleBigQueryV2DatasetTypeProperties. + */ + public static GoogleBigQueryV2DatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryV2DatasetTypeProperties deserializedGoogleBigQueryV2DatasetTypeProperties + = new GoogleBigQueryV2DatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("table".equals(fieldName)) { + deserializedGoogleBigQueryV2DatasetTypeProperties.table = reader.readUntyped(); + } else if ("dataset".equals(fieldName)) { + deserializedGoogleBigQueryV2DatasetTypeProperties.dataset = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleBigQueryV2DatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java index a0de67427d66..15694155068f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleBigQueryV2LinkedServiceTypeProperties.java @@ -6,58 +6,56 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2AuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Google BigQuery service linked service properties. */ @Fluent -public final class GoogleBigQueryV2LinkedServiceTypeProperties { +public final class GoogleBigQueryV2LinkedServiceTypeProperties + implements JsonSerializable<GoogleBigQueryV2LinkedServiceTypeProperties> { /* * The default BigQuery project id to query against. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "projectId", required = true) private Object projectId; /* * The OAuth 2.0 authentication mechanism used for authentication. */ - @JsonProperty(value = "authenticationType", required = true) private GoogleBigQueryV2AuthenticationType authenticationType; /* * The client id of the google application used to acquire the refresh token. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret of the google application used to acquire the refresh token. 
*/ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. */ - @JsonProperty(value = "refreshToken") private SecretBase refreshToken; /* * The content of the .json key file that is used to authenticate the service account. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "keyFileContent") private SecretBase keyFileContent; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -245,4 +243,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleBigQueryV2LinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("projectId", this.projectId); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeJsonField("refreshToken", this.refreshToken); + jsonWriter.writeJsonField("keyFileContent", this.keyFileContent); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryV2LinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryV2LinkedServiceTypeProperties. + */ + public static GoogleBigQueryV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryV2LinkedServiceTypeProperties deserializedGoogleBigQueryV2LinkedServiceTypeProperties + = new GoogleBigQueryV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("projectId".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.projectId = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.authenticationType + = GoogleBigQueryV2AuthenticationType.fromString(reader.getString()); + } else if ("clientId".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("refreshToken".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.refreshToken = SecretBase.fromJson(reader); + } else if ("keyFileContent".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.keyFileContent + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleBigQueryV2LinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java index 469c9285bb5e..4516b7332e6c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleCloudStorageLinkedServiceTypeProperties.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Google Cloud Storage linked service properties. */ @Fluent -public final class GoogleCloudStorageLinkedServiceTypeProperties { +public final class GoogleCloudStorageLinkedServiceTypeProperties + implements JsonSerializable { /* * The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "accessKeyId") private Object accessKeyId; /* * The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */ - @JsonProperty(value = "secretAccessKey") private SecretBase secretAccessKey; /* @@ -31,14 +34,12 @@ public final class GoogleCloudStorageLinkedServiceTypeProperties { * property; change it only if you want to try a different service endpoint or want to switch between https and * http. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -147,4 +148,51 @@ public void validate() { secretAccessKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accessKeyId", this.accessKeyId); + jsonWriter.writeJsonField("secretAccessKey", this.secretAccessKey); + jsonWriter.writeUntypedField("serviceUrl", this.serviceUrl); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleCloudStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleCloudStorageLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleCloudStorageLinkedServiceTypeProperties. 
+ */ + public static GoogleCloudStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleCloudStorageLinkedServiceTypeProperties deserializedGoogleCloudStorageLinkedServiceTypeProperties + = new GoogleCloudStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accessKeyId".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedServiceTypeProperties.accessKeyId = reader.readUntyped(); + } else if ("secretAccessKey".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedServiceTypeProperties.secretAccessKey + = SecretBase.fromJson(reader); + } else if ("serviceUrl".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedServiceTypeProperties.serviceUrl = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleCloudStorageLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java index e795f26d2f00..cdc1e795055c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GoogleSheetsLinkedServiceTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * GoogleSheets linked service type properties. */ @Fluent -public final class GoogleSheetsLinkedServiceTypeProperties { +public final class GoogleSheetsLinkedServiceTypeProperties + implements JsonSerializable { /* * The api token for the GoogleSheets source. */ - @JsonProperty(value = "apiToken", required = true) private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -91,4 +94,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleSheetsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleSheetsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleSheetsLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleSheetsLinkedServiceTypeProperties. 
+ */ + public static GoogleSheetsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleSheetsLinkedServiceTypeProperties deserializedGoogleSheetsLinkedServiceTypeProperties + = new GoogleSheetsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("apiToken".equals(fieldName)) { + deserializedGoogleSheetsLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGoogleSheetsLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGoogleSheetsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumDatasetTypeProperties.java index d27140a67206..33f07c91b5cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Greenplum Dataset Properties. 
*/ @Fluent -public final class GreenplumDatasetTypeProperties { +public final class GreenplumDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of Greenplum. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of Greenplum. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,47 @@ public GreenplumDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GreenplumDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GreenplumDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GreenplumDatasetTypeProperties. 
+ */ + public static GreenplumDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GreenplumDatasetTypeProperties deserializedGreenplumDatasetTypeProperties + = new GreenplumDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedGreenplumDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedGreenplumDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedGreenplumDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedGreenplumDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java index 33a1ee4acf75..bf46567980d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/GreenplumLinkedServiceTypeProperties.java @@ -5,31 +5,33 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Greenplum 
Database linked service properties. */ @Fluent -public final class GreenplumLinkedServiceTypeProperties { +public final class GreenplumLinkedServiceTypeProperties + implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "pwd") private AzureKeyVaultSecretReference pwd; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +114,48 @@ public void validate() { pwd().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("pwd", this.pwd); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GreenplumLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GreenplumLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GreenplumLinkedServiceTypeProperties. 
+ */ + public static GreenplumLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GreenplumLinkedServiceTypeProperties deserializedGreenplumLinkedServiceTypeProperties + = new GreenplumLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedGreenplumLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("pwd".equals(fieldName)) { + deserializedGreenplumLinkedServiceTypeProperties.pwd + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedGreenplumLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGreenplumLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java index 394faf513026..7e733f2e31f2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HBaseLinkedServiceTypeProperties.java @@ -6,55 +6,52 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HBaseAuthenticationType; import 
com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * HBase server linked service properties. */ @Fluent -public final class HBaseLinkedServiceTypeProperties { +public final class HBaseLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or host name of the HBase server. (i.e. 192.168.222.160) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. */ - @JsonProperty(value = "port") private Object port; /* * The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) */ - @JsonProperty(value = "httpPath") private Object httpPath; /* * The authentication mechanism to use to connect to the HBase server. */ - @JsonProperty(value = "authenticationType", required = true) private HBaseAuthenticationType authenticationType; /* * The user name used to connect to the HBase instance. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -62,27 +59,23 @@ public final class HBaseLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. 
*/ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -349,4 +342,74 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HBaseLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("httpPath", this.httpPath); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HBaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HBaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HBaseLinkedServiceTypeProperties. + */ + public static HBaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HBaseLinkedServiceTypeProperties deserializedHBaseLinkedServiceTypeProperties + = new HBaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.authenticationType + = HBaseAuthenticationType.fromString(reader.getString()); + } else if ("port".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("httpPath".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.httpPath = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("enableSsl".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("allowHostNameCNMismatch".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if 
("encryptedCredential".equals(fieldName)) { + deserializedHBaseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedHBaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java index ad6764edf857..ca0d3994b5dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightHiveActivityTypeProperties.java @@ -5,10 +5,13 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -16,55 +19,46 @@ * HDInsight Hive activity properties. */ @Fluent -public final class HDInsightHiveActivityTypeProperties { +public final class HDInsightHiveActivityTypeProperties + implements JsonSerializable { /* * Storage linked service references. */ - @JsonProperty(value = "storageLinkedServices") private List storageLinkedServices; /* * User specified arguments to HDInsightActivity. 
*/ - @JsonProperty(value = "arguments") private List arguments; /* * Debug info option. */ - @JsonProperty(value = "getDebugInfo") private HDInsightActivityDebugInfoOption getDebugInfo; /* * Script path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "scriptPath") private Object scriptPath; /* * Script linked service reference. */ - @JsonProperty(value = "scriptLinkedService") private LinkedServiceReference scriptLinkedService; /* * Allows user to specify defines for Hive job request. */ - @JsonProperty(value = "defines") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map defines; /* * User specified arguments under hivevar namespace. */ - @JsonProperty(value = "variables") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map variables; /* * Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) */ - @JsonProperty(value = "queryTimeout") private Integer queryTimeout; /** @@ -249,4 +243,71 @@ public void validate() { scriptLinkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("storageLinkedServices", this.storageLinkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("arguments", this.arguments, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("getDebugInfo", this.getDebugInfo == null ? 
null : this.getDebugInfo.toString()); + jsonWriter.writeUntypedField("scriptPath", this.scriptPath); + jsonWriter.writeJsonField("scriptLinkedService", this.scriptLinkedService); + jsonWriter.writeMapField("defines", this.defines, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("variables", this.variables, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeNumberField("queryTimeout", this.queryTimeout); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightHiveActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightHiveActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the HDInsightHiveActivityTypeProperties. + */ + public static HDInsightHiveActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightHiveActivityTypeProperties deserializedHDInsightHiveActivityTypeProperties + = new HDInsightHiveActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("storageLinkedServices".equals(fieldName)) { + List storageLinkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedHDInsightHiveActivityTypeProperties.storageLinkedServices = storageLinkedServices; + } else if ("arguments".equals(fieldName)) { + List arguments = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightHiveActivityTypeProperties.arguments = arguments; + } else if ("getDebugInfo".equals(fieldName)) { + deserializedHDInsightHiveActivityTypeProperties.getDebugInfo + = HDInsightActivityDebugInfoOption.fromString(reader.getString()); + } else if ("scriptPath".equals(fieldName)) { + 
deserializedHDInsightHiveActivityTypeProperties.scriptPath = reader.readUntyped(); + } else if ("scriptLinkedService".equals(fieldName)) { + deserializedHDInsightHiveActivityTypeProperties.scriptLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("defines".equals(fieldName)) { + Map defines = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedHDInsightHiveActivityTypeProperties.defines = defines; + } else if ("variables".equals(fieldName)) { + Map variables = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedHDInsightHiveActivityTypeProperties.variables = variables; + } else if ("queryTimeout".equals(fieldName)) { + deserializedHDInsightHiveActivityTypeProperties.queryTimeout + = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightHiveActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java index 3b33b794c45c..59152334ac9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightLinkedServiceTypeProperties.java @@ -6,63 +6,60 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.SecretBase; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * HDInsight linked service properties. */ @Fluent -public final class HDInsightLinkedServiceTypeProperties { +public final class HDInsightLinkedServiceTypeProperties + implements JsonSerializable { /* * HDInsight cluster URI. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clusterUri", required = true) private Object clusterUri; /* * HDInsight cluster user name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * HDInsight cluster password. */ - @JsonProperty(value = "password") private SecretBase password; /* * The Azure Storage linked service reference. */ - @JsonProperty(value = "linkedServiceName") private LinkedServiceReference linkedServiceName; /* * A reference to the Azure SQL linked service that points to the HCatalog database. */ - @JsonProperty(value = "hcatalogLinkedServiceName") private LinkedServiceReference hcatalogLinkedServiceName; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. */ - @JsonProperty(value = "isEspEnabled") private Object isEspEnabled; /* * Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "fileSystem") private Object fileSystem; /** @@ -263,4 +260,65 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("clusterUri", this.clusterUri); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeJsonField("hcatalogLinkedServiceName", this.hcatalogLinkedServiceName); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("isEspEnabled", this.isEspEnabled); + jsonWriter.writeUntypedField("fileSystem", this.fileSystem); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightLinkedServiceTypeProperties. 
+ */ + public static HDInsightLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightLinkedServiceTypeProperties deserializedHDInsightLinkedServiceTypeProperties + = new HDInsightLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("clusterUri".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.clusterUri = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.linkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("hcatalogLinkedServiceName".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.hcatalogLinkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("isEspEnabled".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.isEspEnabled = reader.readUntyped(); + } else if ("fileSystem".equals(fieldName)) { + deserializedHDInsightLinkedServiceTypeProperties.fileSystem = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java index bbdb732e75a6..f04a708154aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightMapReduceActivityTypeProperties.java @@ -6,10 +6,13 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -17,54 +20,46 @@ * HDInsight MapReduce activity properties. */ @Fluent -public final class HDInsightMapReduceActivityTypeProperties { +public final class HDInsightMapReduceActivityTypeProperties + implements JsonSerializable { /* * Storage linked service references. */ - @JsonProperty(value = "storageLinkedServices") private List storageLinkedServices; /* * User specified arguments to HDInsightActivity. */ - @JsonProperty(value = "arguments") private List arguments; /* * Debug info option. */ - @JsonProperty(value = "getDebugInfo") private HDInsightActivityDebugInfoOption getDebugInfo; /* * Class name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "className", required = true) private Object className; /* * Jar path. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "jarFilePath", required = true) private Object jarFilePath; /* * Jar linked service reference. */ - @JsonProperty(value = "jarLinkedService") private LinkedServiceReference jarLinkedService; /* * Jar libs. */ - @JsonProperty(value = "jarLibs") private List jarLibs; /* * Allows user to specify defines for the MapReduce job request. */ - @JsonProperty(value = "defines") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map defines; /** @@ -259,4 +254,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightMapReduceActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("className", this.className); + jsonWriter.writeUntypedField("jarFilePath", this.jarFilePath); + jsonWriter.writeArrayField("storageLinkedServices", this.storageLinkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("arguments", this.arguments, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("getDebugInfo", this.getDebugInfo == null ? null : this.getDebugInfo.toString()); + jsonWriter.writeJsonField("jarLinkedService", this.jarLinkedService); + jsonWriter.writeArrayField("jarLibs", this.jarLibs, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("defines", this.defines, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightMapReduceActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightMapReduceActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightMapReduceActivityTypeProperties. + */ + public static HDInsightMapReduceActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightMapReduceActivityTypeProperties deserializedHDInsightMapReduceActivityTypeProperties + = new HDInsightMapReduceActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("className".equals(fieldName)) { + deserializedHDInsightMapReduceActivityTypeProperties.className = reader.readUntyped(); + } else if ("jarFilePath".equals(fieldName)) { + deserializedHDInsightMapReduceActivityTypeProperties.jarFilePath = reader.readUntyped(); + } else if ("storageLinkedServices".equals(fieldName)) { + List storageLinkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedHDInsightMapReduceActivityTypeProperties.storageLinkedServices = storageLinkedServices; + } else if ("arguments".equals(fieldName)) { + List arguments = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightMapReduceActivityTypeProperties.arguments = arguments; + } else if ("getDebugInfo".equals(fieldName)) { + deserializedHDInsightMapReduceActivityTypeProperties.getDebugInfo + = HDInsightActivityDebugInfoOption.fromString(reader.getString()); + } else if ("jarLinkedService".equals(fieldName)) { + deserializedHDInsightMapReduceActivityTypeProperties.jarLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("jarLibs".equals(fieldName)) { + List jarLibs = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightMapReduceActivityTypeProperties.jarLibs = jarLibs; + } else if ("defines".equals(fieldName)) { + Map defines = reader.readMap(reader1 -> 
reader1.readUntyped()); + deserializedHDInsightMapReduceActivityTypeProperties.defines = defines; + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightMapReduceActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java index 50a8435d1474..50ca704200e5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightOnDemandLinkedServiceTypeProperties.java @@ -6,22 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.ScriptAction; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * HDInsight ondemand linked service properties. */ @Fluent -public final class HDInsightOnDemandLinkedServiceTypeProperties { +public final class HDInsightOnDemandLinkedServiceTypeProperties + implements JsonSerializable { /* * Number of worker/data nodes in the cluster. Suggestion value: 4. Type: int (or Expression with resultType int). 
*/ - @JsonProperty(value = "clusterSize", required = true) private Object clusterSize; /* @@ -29,180 +33,151 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { * stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum * value is 5 mins. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "timeToLive", required = true) private Object timeToLive; /* * Version of the HDInsight cluster.  Type: string (or Expression with resultType string). */ - @JsonProperty(value = "version", required = true) private Object version; /* * Azure Storage linked service to be used by the on-demand cluster for storing and processing data. */ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /* * The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "hostSubscriptionId", required = true) private Object hostSubscriptionId; /* * The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key for the service principal id. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tenant", required = true) private Object tenant; /* * The resource group where the cluster belongs. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clusterResourceGroup", required = true) private Object clusterResourceGroup; /* * The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType * string). 
*/ - @JsonProperty(value = "clusterNamePrefix") private Object clusterNamePrefix; /* * The username to access the cluster. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clusterUserName") private Object clusterUsername; /* * The password to access the cluster. */ - @JsonProperty(value = "clusterPassword") private SecretBase clusterPassword; /* * The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "clusterSshUserName") private Object clusterSshUsername; /* * The password to SSH remotely connect cluster’s node (for Linux). */ - @JsonProperty(value = "clusterSshPassword") private SecretBase clusterSshPassword; /* * Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can * register them on your behalf. */ - @JsonProperty(value = "additionalLinkedServiceNames") private List additionalLinkedServiceNames; /* * The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is * created by using the Azure SQL database as the metastore. */ - @JsonProperty(value = "hcatalogLinkedServiceName") private LinkedServiceReference hcatalogLinkedServiceName; /* * The cluster type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clusterType") private Object clusterType; /* * The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sparkVersion") private Object sparkVersion; /* * Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. */ - @JsonProperty(value = "coreConfiguration") private Object coreConfiguration; /* * Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. 
*/ - @JsonProperty(value = "hBaseConfiguration") private Object hBaseConfiguration; /* * Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "hdfsConfiguration") private Object hdfsConfiguration; /* * Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "hiveConfiguration") private Object hiveConfiguration; /* * Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "mapReduceConfiguration") private Object mapReduceConfiguration; /* * Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "oozieConfiguration") private Object oozieConfiguration; /* * Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "stormConfiguration") private Object stormConfiguration; /* * Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. */ - @JsonProperty(value = "yarnConfiguration") private Object yarnConfiguration; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Specifies the size of the head node for the HDInsight cluster. */ - @JsonProperty(value = "headNodeSize") private Object headNodeSize; /* * Specifies the size of the data node for the HDInsight cluster. */ - @JsonProperty(value = "dataNodeSize") private Object dataNodeSize; /* * Specifies the size of the Zoo Keeper node for the HDInsight cluster. 
*/ - @JsonProperty(value = "zookeeperNodeSize") private Object zookeeperNodeSize; /* @@ -210,27 +185,23 @@ public final class HDInsightOnDemandLinkedServiceTypeProperties { * https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure% * 2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */ - @JsonProperty(value = "scriptActions") private List scriptActions; /* * The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "virtualNetworkId") private Object virtualNetworkId; /* * The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is * required. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "subnetName") private Object subnetName; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -1043,4 +1014,154 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightOnDemandLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("clusterSize", this.clusterSize); + jsonWriter.writeUntypedField("timeToLive", this.timeToLive); + jsonWriter.writeUntypedField("version", this.version); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeUntypedField("hostSubscriptionId", this.hostSubscriptionId); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("clusterResourceGroup", this.clusterResourceGroup); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("clusterNamePrefix", this.clusterNamePrefix); + jsonWriter.writeUntypedField("clusterUserName", this.clusterUsername); + jsonWriter.writeJsonField("clusterPassword", this.clusterPassword); + jsonWriter.writeUntypedField("clusterSshUserName", this.clusterSshUsername); + jsonWriter.writeJsonField("clusterSshPassword", this.clusterSshPassword); + jsonWriter.writeArrayField("additionalLinkedServiceNames", this.additionalLinkedServiceNames, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("hcatalogLinkedServiceName", this.hcatalogLinkedServiceName); + jsonWriter.writeUntypedField("clusterType", this.clusterType); + jsonWriter.writeUntypedField("sparkVersion", this.sparkVersion); + jsonWriter.writeUntypedField("coreConfiguration", this.coreConfiguration); + jsonWriter.writeUntypedField("hBaseConfiguration", this.hBaseConfiguration); + jsonWriter.writeUntypedField("hdfsConfiguration", 
this.hdfsConfiguration); + jsonWriter.writeUntypedField("hiveConfiguration", this.hiveConfiguration); + jsonWriter.writeUntypedField("mapReduceConfiguration", this.mapReduceConfiguration); + jsonWriter.writeUntypedField("oozieConfiguration", this.oozieConfiguration); + jsonWriter.writeUntypedField("stormConfiguration", this.stormConfiguration); + jsonWriter.writeUntypedField("yarnConfiguration", this.yarnConfiguration); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("headNodeSize", this.headNodeSize); + jsonWriter.writeUntypedField("dataNodeSize", this.dataNodeSize); + jsonWriter.writeUntypedField("zookeeperNodeSize", this.zookeeperNodeSize); + jsonWriter.writeArrayField("scriptActions", this.scriptActions, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeUntypedField("virtualNetworkId", this.virtualNetworkId); + jsonWriter.writeUntypedField("subnetName", this.subnetName); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightOnDemandLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightOnDemandLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightOnDemandLinkedServiceTypeProperties. 
+ */ + public static HDInsightOnDemandLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightOnDemandLinkedServiceTypeProperties deserializedHDInsightOnDemandLinkedServiceTypeProperties + = new HDInsightOnDemandLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("clusterSize".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterSize = reader.readUntyped(); + } else if ("timeToLive".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.timeToLive = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.version = reader.readUntyped(); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.linkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("hostSubscriptionId".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.hostSubscriptionId = reader.readUntyped(); + } else if ("tenant".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("clusterResourceGroup".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterResourceGroup + = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("clusterNamePrefix".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterNamePrefix = reader.readUntyped(); + } else if 
("clusterUserName".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterUsername = reader.readUntyped(); + } else if ("clusterPassword".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterPassword + = SecretBase.fromJson(reader); + } else if ("clusterSshUserName".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterSshUsername = reader.readUntyped(); + } else if ("clusterSshPassword".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterSshPassword + = SecretBase.fromJson(reader); + } else if ("additionalLinkedServiceNames".equals(fieldName)) { + List additionalLinkedServiceNames + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedHDInsightOnDemandLinkedServiceTypeProperties.additionalLinkedServiceNames + = additionalLinkedServiceNames; + } else if ("hcatalogLinkedServiceName".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.hcatalogLinkedServiceName + = LinkedServiceReference.fromJson(reader); + } else if ("clusterType".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.clusterType = reader.readUntyped(); + } else if ("sparkVersion".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.sparkVersion = reader.readUntyped(); + } else if ("coreConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.coreConfiguration = reader.readUntyped(); + } else if ("hBaseConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.hBaseConfiguration = reader.readUntyped(); + } else if ("hdfsConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.hdfsConfiguration = reader.readUntyped(); + } else if ("hiveConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.hiveConfiguration = 
reader.readUntyped(); + } else if ("mapReduceConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.mapReduceConfiguration + = reader.readUntyped(); + } else if ("oozieConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.oozieConfiguration = reader.readUntyped(); + } else if ("stormConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.stormConfiguration = reader.readUntyped(); + } else if ("yarnConfiguration".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.yarnConfiguration = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("headNodeSize".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.headNodeSize = reader.readUntyped(); + } else if ("dataNodeSize".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.dataNodeSize = reader.readUntyped(); + } else if ("zookeeperNodeSize".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.zookeeperNodeSize = reader.readUntyped(); + } else if ("scriptActions".equals(fieldName)) { + List scriptActions = reader.readArray(reader1 -> ScriptAction.fromJson(reader1)); + deserializedHDInsightOnDemandLinkedServiceTypeProperties.scriptActions = scriptActions; + } else if ("virtualNetworkId".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.virtualNetworkId = reader.readUntyped(); + } else if ("subnetName".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.subnetName = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return 
deserializedHDInsightOnDemandLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightPigActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightPigActivityTypeProperties.java index 58415ab36711..3957adbd53fd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightPigActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightPigActivityTypeProperties.java @@ -5,10 +5,13 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -16,42 +19,35 @@ * HDInsight Pig activity properties. */ @Fluent -public final class HDInsightPigActivityTypeProperties { +public final class HDInsightPigActivityTypeProperties implements JsonSerializable { /* * Storage linked service references. */ - @JsonProperty(value = "storageLinkedServices") private List storageLinkedServices; /* * User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). */ - @JsonProperty(value = "arguments") private Object arguments; /* * Debug info option. 
*/ - @JsonProperty(value = "getDebugInfo") private HDInsightActivityDebugInfoOption getDebugInfo; /* * Script path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "scriptPath") private Object scriptPath; /* * Script linked service reference. */ - @JsonProperty(value = "scriptLinkedService") private LinkedServiceReference scriptLinkedService; /* * Allows user to specify defines for Pig job request. */ - @JsonProperty(value = "defines") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map defines; /** @@ -196,4 +192,62 @@ public void validate() { scriptLinkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("storageLinkedServices", this.storageLinkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeUntypedField("arguments", this.arguments); + jsonWriter.writeStringField("getDebugInfo", this.getDebugInfo == null ? null : this.getDebugInfo.toString()); + jsonWriter.writeUntypedField("scriptPath", this.scriptPath); + jsonWriter.writeJsonField("scriptLinkedService", this.scriptLinkedService); + jsonWriter.writeMapField("defines", this.defines, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightPigActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightPigActivityTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the HDInsightPigActivityTypeProperties. 
+ */ + public static HDInsightPigActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightPigActivityTypeProperties deserializedHDInsightPigActivityTypeProperties + = new HDInsightPigActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("storageLinkedServices".equals(fieldName)) { + List storageLinkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedHDInsightPigActivityTypeProperties.storageLinkedServices = storageLinkedServices; + } else if ("arguments".equals(fieldName)) { + deserializedHDInsightPigActivityTypeProperties.arguments = reader.readUntyped(); + } else if ("getDebugInfo".equals(fieldName)) { + deserializedHDInsightPigActivityTypeProperties.getDebugInfo + = HDInsightActivityDebugInfoOption.fromString(reader.getString()); + } else if ("scriptPath".equals(fieldName)) { + deserializedHDInsightPigActivityTypeProperties.scriptPath = reader.readUntyped(); + } else if ("scriptLinkedService".equals(fieldName)) { + deserializedHDInsightPigActivityTypeProperties.scriptLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("defines".equals(fieldName)) { + Map defines = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedHDInsightPigActivityTypeProperties.defines = defines; + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightPigActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java index 874d1a2055c5..e88cb8519c09 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightSparkActivityTypeProperties.java @@ -6,10 +6,13 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -17,56 +20,48 @@ * HDInsight spark activity properties. */ @Fluent -public final class HDInsightSparkActivityTypeProperties { +public final class HDInsightSparkActivityTypeProperties + implements JsonSerializable { /* * The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "rootPath", required = true) private Object rootPath; /* * The relative path to the root folder of the code/package to be executed. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "entryFilePath", required = true) private Object entryFilePath; /* * The user-specified arguments to HDInsightSparkActivity. */ - @JsonProperty(value = "arguments") private List arguments; /* * Debug info option. */ - @JsonProperty(value = "getDebugInfo") private HDInsightActivityDebugInfoOption getDebugInfo; /* * The storage linked service for uploading the entry file and dependencies, and for receiving logs. 
*/ - @JsonProperty(value = "sparkJobLinkedService") private LinkedServiceReference sparkJobLinkedService; /* * The application's Java/Spark main class. */ - @JsonProperty(value = "className") private String className; /* * The user to impersonate that will execute the job. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "proxyUser") private Object proxyUser; /* * Spark configuration property. */ - @JsonProperty(value = "sparkConfig") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map sparkConfig; /** @@ -266,4 +261,67 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightSparkActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("rootPath", this.rootPath); + jsonWriter.writeUntypedField("entryFilePath", this.entryFilePath); + jsonWriter.writeArrayField("arguments", this.arguments, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("getDebugInfo", this.getDebugInfo == null ? null : this.getDebugInfo.toString()); + jsonWriter.writeJsonField("sparkJobLinkedService", this.sparkJobLinkedService); + jsonWriter.writeStringField("className", this.className); + jsonWriter.writeUntypedField("proxyUser", this.proxyUser); + jsonWriter.writeMapField("sparkConfig", this.sparkConfig, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightSparkActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightSparkActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightSparkActivityTypeProperties. + */ + public static HDInsightSparkActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightSparkActivityTypeProperties deserializedHDInsightSparkActivityTypeProperties + = new HDInsightSparkActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("rootPath".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.rootPath = reader.readUntyped(); + } else if ("entryFilePath".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.entryFilePath = reader.readUntyped(); + } else if ("arguments".equals(fieldName)) { + List arguments = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightSparkActivityTypeProperties.arguments = arguments; + } else if ("getDebugInfo".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.getDebugInfo + = HDInsightActivityDebugInfoOption.fromString(reader.getString()); + } else if ("sparkJobLinkedService".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.sparkJobLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("className".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.className = reader.getString(); + } else if ("proxyUser".equals(fieldName)) { + deserializedHDInsightSparkActivityTypeProperties.proxyUser = reader.readUntyped(); + } else if ("sparkConfig".equals(fieldName)) { + Map sparkConfig = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedHDInsightSparkActivityTypeProperties.sparkConfig = sparkConfig; + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightSparkActivityTypeProperties; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java index 4ae5b90bd27f..3d147775ee2d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HDInsightStreamingActivityTypeProperties.java @@ -6,10 +6,13 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -17,78 +20,66 @@ * HDInsight streaming activity properties. */ @Fluent -public final class HDInsightStreamingActivityTypeProperties { +public final class HDInsightStreamingActivityTypeProperties + implements JsonSerializable { /* * Storage linked service references. */ - @JsonProperty(value = "storageLinkedServices") private List storageLinkedServices; /* * User specified arguments to HDInsightActivity. */ - @JsonProperty(value = "arguments") private List arguments; /* * Debug info option. */ - @JsonProperty(value = "getDebugInfo") private HDInsightActivityDebugInfoOption getDebugInfo; /* * Mapper executable name. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "mapper", required = true) private Object mapper; /* * Reducer executable name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "reducer", required = true) private Object reducer; /* * Input blob path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "input", required = true) private Object input; /* * Output blob path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "output", required = true) private Object output; /* * Paths to streaming job files. Can be directories. */ - @JsonProperty(value = "filePaths", required = true) private List filePaths; /* * Linked service reference where the files are located. */ - @JsonProperty(value = "fileLinkedService") private LinkedServiceReference fileLinkedService; /* * Combiner executable name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "combiner") private Object combiner; /* * Command line environment values. */ - @JsonProperty(value = "commandEnvironment") private List commandEnvironment; /* * Allows user to specify defines for streaming job request. 
*/ - @JsonProperty(value = "defines") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map defines; /** @@ -378,4 +369,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightStreamingActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("mapper", this.mapper); + jsonWriter.writeUntypedField("reducer", this.reducer); + jsonWriter.writeUntypedField("input", this.input); + jsonWriter.writeUntypedField("output", this.output); + jsonWriter.writeArrayField("filePaths", this.filePaths, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("storageLinkedServices", this.storageLinkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("arguments", this.arguments, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("getDebugInfo", this.getDebugInfo == null ? null : this.getDebugInfo.toString()); + jsonWriter.writeJsonField("fileLinkedService", this.fileLinkedService); + jsonWriter.writeUntypedField("combiner", this.combiner); + jsonWriter.writeArrayField("commandEnvironment", this.commandEnvironment, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("defines", this.defines, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightStreamingActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightStreamingActivityTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the HDInsightStreamingActivityTypeProperties. + */ + public static HDInsightStreamingActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightStreamingActivityTypeProperties deserializedHDInsightStreamingActivityTypeProperties + = new HDInsightStreamingActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("mapper".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.mapper = reader.readUntyped(); + } else if ("reducer".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.reducer = reader.readUntyped(); + } else if ("input".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.input = reader.readUntyped(); + } else if ("output".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.output = reader.readUntyped(); + } else if ("filePaths".equals(fieldName)) { + List filePaths = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightStreamingActivityTypeProperties.filePaths = filePaths; + } else if ("storageLinkedServices".equals(fieldName)) { + List storageLinkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedHDInsightStreamingActivityTypeProperties.storageLinkedServices = storageLinkedServices; + } else if ("arguments".equals(fieldName)) { + List arguments = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightStreamingActivityTypeProperties.arguments = arguments; + } else if ("getDebugInfo".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.getDebugInfo + = HDInsightActivityDebugInfoOption.fromString(reader.getString()); + } else if ("fileLinkedService".equals(fieldName)) { + 
deserializedHDInsightStreamingActivityTypeProperties.fileLinkedService + = LinkedServiceReference.fromJson(reader); + } else if ("combiner".equals(fieldName)) { + deserializedHDInsightStreamingActivityTypeProperties.combiner = reader.readUntyped(); + } else if ("commandEnvironment".equals(fieldName)) { + List commandEnvironment = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightStreamingActivityTypeProperties.commandEnvironment = commandEnvironment; + } else if ("defines".equals(fieldName)) { + Map defines = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedHDInsightStreamingActivityTypeProperties.defines = defines; + } else { + reader.skipChildren(); + } + } + + return deserializedHDInsightStreamingActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java index 00968bf41692..40590f850174 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HdfsLinkedServiceTypeProperties.java @@ -6,45 +6,44 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * HDFS linked service properties. 
*/ @Fluent -public final class HdfsLinkedServiceTypeProperties { +public final class HdfsLinkedServiceTypeProperties implements JsonSerializable { /* * The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * User name for Windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password for Windows authentication. */ - @JsonProperty(value = "password") private SecretBase password; /** @@ -178,4 +177,54 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HdfsLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HdfsLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of HdfsLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HdfsLinkedServiceTypeProperties. + */ + public static HdfsLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HdfsLinkedServiceTypeProperties deserializedHdfsLinkedServiceTypeProperties + = new HdfsLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedHdfsLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedHdfsLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHdfsLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("userName".equals(fieldName)) { + deserializedHdfsLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedHdfsLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedHdfsLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveDatasetTypeProperties.java index 7a0a3116be55..3ff988d6e2af 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Hive Properties. */ @Fluent -public final class HiveDatasetTypeProperties { +public final class HiveDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Hive. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Hive. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public HiveDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HiveDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of HiveDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the HiveDatasetTypeProperties. + */ + public static HiveDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HiveDatasetTypeProperties deserializedHiveDatasetTypeProperties = new HiveDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedHiveDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedHiveDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedHiveDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedHiveDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java index 1d2ef02513e8..e31f467e6249 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HiveLinkedServiceTypeProperties.java @@ -6,88 +6,80 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.HiveAuthenticationType; import com.azure.resourcemanager.datafactory.models.HiveServerType; import com.azure.resourcemanager.datafactory.models.HiveThriftTransportProtocol; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Hive Server linked service properties. */ @Fluent -public final class HiveLinkedServiceTypeProperties { +public final class HiveLinkedServiceTypeProperties implements JsonSerializable { /* * IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode * is enable). */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the Hive server uses to listen for client connections. */ - @JsonProperty(value = "port") private Object port; /* * The type of Hive server. */ - @JsonProperty(value = "serverType") private HiveServerType serverType; /* * The transport protocol to use in the Thrift layer. */ - @JsonProperty(value = "thriftTransportProtocol") private HiveThriftTransportProtocol thriftTransportProtocol; /* * The authentication method used to access the Hive server. */ - @JsonProperty(value = "authenticationType", required = true) private HiveAuthenticationType authenticationType; /* * true to indicate using the ZooKeeper service, false not. */ - @JsonProperty(value = "serviceDiscoveryMode") private Object serviceDiscoveryMode; /* * The namespace on ZooKeeper under which Hive Server 2 nodes are added. */ - @JsonProperty(value = "zooKeeperNameSpace") private Object zooKeeperNameSpace; /* * Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. */ - @JsonProperty(value = "useNativeQuery") private Object useNativeQuery; /* * The user name that you use to access Hive Server. 
*/ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name that you provided in the Username field */ - @JsonProperty(value = "password") private SecretBase password; /* * The partial URL corresponding to the Hive server. */ - @JsonProperty(value = "httpPath") private Object httpPath; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -95,34 +87,29 @@ public final class HiveLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false. */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. */ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -512,4 +499,95 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HiveLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeStringField("serverType", this.serverType == null ? null : this.serverType.toString()); + jsonWriter.writeStringField("thriftTransportProtocol", + this.thriftTransportProtocol == null ? null : this.thriftTransportProtocol.toString()); + jsonWriter.writeUntypedField("serviceDiscoveryMode", this.serviceDiscoveryMode); + jsonWriter.writeUntypedField("zooKeeperNameSpace", this.zooKeeperNameSpace); + jsonWriter.writeUntypedField("useNativeQuery", this.useNativeQuery); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("httpPath", this.httpPath); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HiveLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of HiveLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HiveLinkedServiceTypeProperties. + */ + public static HiveLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HiveLinkedServiceTypeProperties deserializedHiveLinkedServiceTypeProperties + = new HiveLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.authenticationType + = HiveAuthenticationType.fromString(reader.getString()); + } else if ("port".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("serverType".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.serverType + = HiveServerType.fromString(reader.getString()); + } else if ("thriftTransportProtocol".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.thriftTransportProtocol + = HiveThriftTransportProtocol.fromString(reader.getString()); + } else if ("serviceDiscoveryMode".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.serviceDiscoveryMode = reader.readUntyped(); + } else if ("zooKeeperNameSpace".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.zooKeeperNameSpace = reader.readUntyped(); + } else if ("useNativeQuery".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.useNativeQuery = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + 
deserializedHiveLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("httpPath".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.httpPath = reader.readUntyped(); + } else if ("enableSsl".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("allowHostNameCNMismatch".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHiveLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedHiveLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java index f25a56e4d74f..01748c1c7de8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpDatasetTypeProperties.java @@ -5,32 +5,33 @@ package 
com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this dataset type. */ @Fluent -public final class HttpDatasetTypeProperties { +public final class HttpDatasetTypeProperties implements JsonSerializable { /* * The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "relativeUrl") private Object relativeUrl; /* * The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "requestMethod") private Object requestMethod; /* * The body for the HTTP request. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "requestBody") private Object requestBody; /* @@ -38,19 +39,16 @@ public final class HttpDatasetTypeProperties { * ... * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). */ - @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* * The format of files. */ - @JsonProperty(value = "format") private DatasetStorageFormat format; /* * The data compression method used on files. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -202,4 +200,55 @@ public void validate() { compression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("relativeUrl", this.relativeUrl); + jsonWriter.writeUntypedField("requestMethod", this.requestMethod); + jsonWriter.writeUntypedField("requestBody", this.requestBody); + jsonWriter.writeUntypedField("additionalHeaders", this.additionalHeaders); + jsonWriter.writeJsonField("format", this.format); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the HttpDatasetTypeProperties. 
+ */ + public static HttpDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpDatasetTypeProperties deserializedHttpDatasetTypeProperties = new HttpDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("relativeUrl".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.relativeUrl = reader.readUntyped(); + } else if ("requestMethod".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.requestMethod = reader.readUntyped(); + } else if ("requestBody".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.requestBody = reader.readUntyped(); + } else if ("additionalHeaders".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.additionalHeaders = reader.readUntyped(); + } else if ("format".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.format = DatasetStorageFormat.fromJson(reader); + } else if ("compression".equals(fieldName)) { + deserializedHttpDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedHttpDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java index 6db471f7adc1..e11114973ef1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java @@ -6,45 +6,44 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.HttpAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. */ @Fluent -public final class HttpLinkedServiceTypeProperties { +public final class HttpLinkedServiceTypeProperties implements JsonSerializable { /* * The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The authentication type to be used to connect to the HTTP server. */ - @JsonProperty(value = "authenticationType") private HttpAuthenticationType authenticationType; /* * User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value * should be string type). */ - @JsonProperty(value = "authHeaders") private Object authHeaders; /* @@ -52,7 +51,6 @@ public final class HttpLinkedServiceTypeProperties { * authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or * Expression with resultType string). 
*/ - @JsonProperty(value = "embeddedCertData") private Object embeddedCertData; /* @@ -60,21 +58,18 @@ public final class HttpLinkedServiceTypeProperties { * copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be * specified. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "certThumbprint") private Object certThumbprint; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; /** @@ -301,4 +296,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HttpLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("authHeaders", this.authHeaders); + jsonWriter.writeUntypedField("embeddedCertData", this.embeddedCertData); + jsonWriter.writeUntypedField("certThumbprint", this.certThumbprint); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("enableServerCertificateValidation", this.enableServerCertificateValidation); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HttpLinkedServiceTypeProperties. 
+ */ + public static HttpLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpLinkedServiceTypeProperties deserializedHttpLinkedServiceTypeProperties + = new HttpLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.authenticationType + = HttpAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("authHeaders".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.authHeaders = reader.readUntyped(); + } else if ("embeddedCertData".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.embeddedCertData = reader.readUntyped(); + } else if ("certThumbprint".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.certThumbprint = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("enableServerCertificateValidation".equals(fieldName)) { + deserializedHttpLinkedServiceTypeProperties.enableServerCertificateValidation + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedHttpLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java index 4cb98b90b762..2b8e1d982644 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HubspotLinkedServiceTypeProperties.java @@ -6,62 +6,58 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Hubspot Service linked service properties. */ @Fluent -public final class HubspotLinkedServiceTypeProperties { +public final class HubspotLinkedServiceTypeProperties implements JsonSerializable { /* * The client ID associated with your Hubspot application. */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * The client secret associated with your Hubspot application. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The access token obtained when initially authenticating your OAuth integration. */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * The refresh token obtained when initially authenticating your OAuth integration. */ - @JsonProperty(value = "refreshToken") private SecretBase refreshToken; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -261,4 +257,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HubspotLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeJsonField("refreshToken", this.refreshToken); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HubspotLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HubspotLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HubspotLinkedServiceTypeProperties. + */ + public static HubspotLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HubspotLinkedServiceTypeProperties deserializedHubspotLinkedServiceTypeProperties + = new HubspotLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("clientId".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("accessToken".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("refreshToken".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.refreshToken = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedHubspotLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedHubspotLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java index 6dbf45cf3ca6..35e84beced28 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IfConditionActivityTypeProperties.java @@ -6,35 +6,36 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.Expression; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * IfCondition activity properties. */ @Fluent -public final class IfConditionActivityTypeProperties { +public final class IfConditionActivityTypeProperties implements JsonSerializable { /* * An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities * or ifFalseActivities) that will be executed. */ - @JsonProperty(value = "expression", required = true) private Expression expression; /* * List of activities to execute if expression is evaluated to true. This is an optional property and if not * provided, the activity will exit without any action. */ - @JsonProperty(value = "ifTrueActivities") private List ifTrueActivities; /* * List of activities to execute if expression is evaluated to false. 
This is an optional property and if not * provided, the activity will exit without any action. */ - @JsonProperty(value = "ifFalseActivities") private List ifFalseActivities; /** @@ -131,4 +132,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IfConditionActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("expression", this.expression); + jsonWriter.writeArrayField("ifTrueActivities", this.ifTrueActivities, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("ifFalseActivities", this.ifFalseActivities, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IfConditionActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IfConditionActivityTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IfConditionActivityTypeProperties. 
+ */ + public static IfConditionActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IfConditionActivityTypeProperties deserializedIfConditionActivityTypeProperties + = new IfConditionActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("expression".equals(fieldName)) { + deserializedIfConditionActivityTypeProperties.expression = Expression.fromJson(reader); + } else if ("ifTrueActivities".equals(fieldName)) { + List ifTrueActivities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedIfConditionActivityTypeProperties.ifTrueActivities = ifTrueActivities; + } else if ("ifFalseActivities".equals(fieldName)) { + List ifFalseActivities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedIfConditionActivityTypeProperties.ifFalseActivities = ifFalseActivities; + } else { + reader.skipChildren(); + } + } + + return deserializedIfConditionActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaDatasetTypeProperties.java index 573345592ed2..a8f443216ac6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Impala Dataset Properties. */ @Fluent -public final class ImpalaDatasetTypeProperties { +public final class ImpalaDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Impala. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Impala. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public ImpalaDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImpalaDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImpalaDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ImpalaDatasetTypeProperties. 
+ */ + public static ImpalaDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImpalaDatasetTypeProperties deserializedImpalaDatasetTypeProperties = new ImpalaDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedImpalaDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedImpalaDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedImpalaDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedImpalaDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java index ec361c1b1f8c..6ee3a2560fb4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ImpalaLinkedServiceTypeProperties.java @@ -6,49 +6,47 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ImpalaAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * 
Impala server linked service properties. */ @Fluent -public final class ImpalaLinkedServiceTypeProperties { +public final class ImpalaLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or host name of the Impala server. (i.e. 192.168.222.160) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the Impala server uses to listen for client connections. The default value is 21050. */ - @JsonProperty(value = "port") private Object port; /* * The authentication type to use. */ - @JsonProperty(value = "authenticationType", required = true) private ImpalaAuthenticationType authenticationType; /* * The user name used to access the Impala server. The default value is anonymous when using SASLUsername. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name when using UsernameAndPassword. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -56,34 +54,29 @@ public final class ImpalaLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false. */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. */ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. 
The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -352,4 +345,74 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ImpalaLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImpalaLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImpalaLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the ImpalaLinkedServiceTypeProperties. + */ + public static ImpalaLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImpalaLinkedServiceTypeProperties deserializedImpalaLinkedServiceTypeProperties + = new ImpalaLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.authenticationType + = ImpalaAuthenticationType.fromString(reader.getString()); + } else if ("port".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("enableSsl".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("allowHostNameCNMismatch".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedImpalaLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + 
deserializedImpalaLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedImpalaLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java index f9e165d54729..aa13825cfa55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixLinkedServiceTypeProperties.java @@ -6,51 +6,50 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Informix linked service properties. */ @Fluent -public final class InformixLinkedServiceTypeProperties { +public final class InformixLinkedServiceTypeProperties + implements JsonSerializable { /* * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and * Basic. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The access credential portion of the connection string specified in driver-specific property-value format. */ - @JsonProperty(value = "credential") private SecretBase credential; /* * User name for Basic authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password for Basic authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -211,4 +210,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(InformixLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the InformixLinkedServiceTypeProperties. + */ + public static InformixLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixLinkedServiceTypeProperties deserializedInformixLinkedServiceTypeProperties + = new InformixLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.credential = SecretBase.fromJson(reader); + } else if ("userName".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedInformixLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedInformixLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixTableDatasetTypeProperties.java index 10db749f4e3a..5cc2931ca14f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixTableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/InformixTableDatasetTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Informix table dataset properties. */ @Fluent -public final class InformixTableDatasetTypeProperties { +public final class InformixTableDatasetTypeProperties implements JsonSerializable { /* * The Informix table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +54,41 @@ public InformixTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the InformixTableDatasetTypeProperties. 
+ */ + public static InformixTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixTableDatasetTypeProperties deserializedInformixTableDatasetTypeProperties + = new InformixTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedInformixTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedInformixTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeAuthKeysInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeAuthKeysInner.java index 16de3c0a61a6..6896b99725fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeAuthKeysInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeAuthKeysInner.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The integration runtime authentication keys. */ @Fluent -public final class IntegrationRuntimeAuthKeysInner { +public final class IntegrationRuntimeAuthKeysInner implements JsonSerializable { /* * The primary integration runtime authentication key. 
*/ - @JsonProperty(value = "authKey1") private String authKey1; /* * The secondary integration runtime authentication key. */ - @JsonProperty(value = "authKey2") private String authKey2; /** @@ -77,4 +79,44 @@ public IntegrationRuntimeAuthKeysInner withAuthKey2(String authKey2) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("authKey1", this.authKey1); + jsonWriter.writeStringField("authKey2", this.authKey2); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeAuthKeysInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeAuthKeysInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeAuthKeysInner. 
+ */ + public static IntegrationRuntimeAuthKeysInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeAuthKeysInner deserializedIntegrationRuntimeAuthKeysInner + = new IntegrationRuntimeAuthKeysInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("authKey1".equals(fieldName)) { + deserializedIntegrationRuntimeAuthKeysInner.authKey1 = reader.getString(); + } else if ("authKey2".equals(fieldName)) { + deserializedIntegrationRuntimeAuthKeysInner.authKey2 = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeAuthKeysInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeConnectionInfoInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeConnectionInfoInner.java index 2e2df8051828..de65a19f43f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeConnectionInfoInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeConnectionInfoInner.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import 
java.util.Map; @@ -16,47 +17,41 @@ * Connection information for encrypting the on-premises data source credentials. */ @Fluent -public final class IntegrationRuntimeConnectionInfoInner { +public final class IntegrationRuntimeConnectionInfoInner + implements JsonSerializable { /* * The token generated in service. Callers use this token to authenticate to integration runtime. */ - @JsonProperty(value = "serviceToken", access = JsonProperty.Access.WRITE_ONLY) private String serviceToken; /* * The integration runtime SSL certificate thumbprint. Click-Once application uses it to do server validation. */ - @JsonProperty(value = "identityCertThumbprint", access = JsonProperty.Access.WRITE_ONLY) private String identityCertThumbprint; /* * The on-premises integration runtime host URL. */ - @JsonProperty(value = "hostServiceUri", access = JsonProperty.Access.WRITE_ONLY) private String hostServiceUri; /* * The integration runtime version. */ - @JsonProperty(value = "version", access = JsonProperty.Access.WRITE_ONLY) private String version; /* * The public key for encrypting a credential when transferring the credential to the integration runtime. */ - @JsonProperty(value = "publicKey", access = JsonProperty.Access.WRITE_ONLY) private String publicKey; /* * Whether the identity certificate is expired. */ - @JsonProperty(value = "isIdentityCertExprired", access = JsonProperty.Access.WRITE_ONLY) private Boolean isIdentityCertExprired; /* * Connection information for encrypting the on-premises data source credentials. */ - @JsonIgnore private Map additionalProperties; /** @@ -128,7 +123,6 @@ public Boolean isIdentityCertExprired() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -145,14 +139,6 @@ public IntegrationRuntimeConnectionInfoInner withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -160,4 +146,62 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeConnectionInfoInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeConnectionInfoInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeConnectionInfoInner. + */ + public static IntegrationRuntimeConnectionInfoInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeConnectionInfoInner deserializedIntegrationRuntimeConnectionInfoInner + = new IntegrationRuntimeConnectionInfoInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serviceToken".equals(fieldName)) { + deserializedIntegrationRuntimeConnectionInfoInner.serviceToken = reader.getString(); + } else if ("identityCertThumbprint".equals(fieldName)) { + deserializedIntegrationRuntimeConnectionInfoInner.identityCertThumbprint = reader.getString(); + } else if ("hostServiceUri".equals(fieldName)) { + deserializedIntegrationRuntimeConnectionInfoInner.hostServiceUri = reader.getString(); + } else if ("version".equals(fieldName)) { + deserializedIntegrationRuntimeConnectionInfoInner.version = reader.getString(); + } else if ("publicKey".equals(fieldName)) { + 
deserializedIntegrationRuntimeConnectionInfoInner.publicKey = reader.getString(); + } else if ("isIdentityCertExprired".equals(fieldName)) { + deserializedIntegrationRuntimeConnectionInfoInner.isIdentityCertExprired + = reader.getNullable(JsonReader::getBoolean); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeConnectionInfoInner.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeConnectionInfoInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeMonitoringDataInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeMonitoringDataInner.java index c9792f68cd9e..060052bf123b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeMonitoringDataInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeMonitoringDataInner.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeMonitoringData; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Get monitoring data response. */ @Fluent -public final class IntegrationRuntimeMonitoringDataInner { +public final class IntegrationRuntimeMonitoringDataInner + implements JsonSerializable { /* * Integration runtime name. 
*/ - @JsonProperty(value = "name") private String name; /* * Integration runtime node monitoring data. */ - @JsonProperty(value = "nodes") private List nodes; /** @@ -82,4 +85,46 @@ public void validate() { nodes().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeArrayField("nodes", this.nodes, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeMonitoringDataInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeMonitoringDataInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeMonitoringDataInner. + */ + public static IntegrationRuntimeMonitoringDataInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeMonitoringDataInner deserializedIntegrationRuntimeMonitoringDataInner + = new IntegrationRuntimeMonitoringDataInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedIntegrationRuntimeMonitoringDataInner.name = reader.getString(); + } else if ("nodes".equals(fieldName)) { + List nodes + = reader.readArray(reader1 -> IntegrationRuntimeNodeMonitoringData.fromJson(reader1)); + deserializedIntegrationRuntimeMonitoringDataInner.nodes = nodes; + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeMonitoringDataInner; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeNodeIpAddressInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeNodeIpAddressInner.java index f14513134ce3..2fbab99f4566 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeNodeIpAddressInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeNodeIpAddressInner.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The IP address of self-hosted integration runtime node. */ @Immutable -public final class IntegrationRuntimeNodeIpAddressInner { +public final class IntegrationRuntimeNodeIpAddressInner + implements JsonSerializable { /* * The IP address of self-hosted integration runtime node. */ - @JsonProperty(value = "ipAddress", access = JsonProperty.Access.WRITE_ONLY) private String ipAddress; /** @@ -40,4 +44,40 @@ public String ipAddress() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeNodeIpAddressInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of IntegrationRuntimeNodeIpAddressInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeNodeIpAddressInner. + */ + public static IntegrationRuntimeNodeIpAddressInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeNodeIpAddressInner deserializedIntegrationRuntimeNodeIpAddressInner + = new IntegrationRuntimeNodeIpAddressInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("ipAddress".equals(fieldName)) { + deserializedIntegrationRuntimeNodeIpAddressInner.ipAddress = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeNodeIpAddressInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.java index 740050625f20..99e8f84df261 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.java @@ -5,19 +5,23 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Azure-SSIS integration runtime outbound network dependency endpoints. */ @Fluent -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner { +public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner + implements JsonSerializable { /* * The list of outbound network dependency endpoints. */ - @JsonProperty(value = "value") private List value; /** @@ -57,4 +61,45 @@ public void validate() { value().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner if the JsonReader was + * pointing to an instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the + * IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner. 
+ */ + public static IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner + = new IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray( + reader1 -> IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.fromJson(reader1)); + deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.value = value; + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java index 2b428d34dcf2..8573900c9b85 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Integration runtime resource type. @@ -18,25 +21,21 @@ public final class IntegrationRuntimeResourceInner extends SubResource { /* * Integration runtime properties. */ - @JsonProperty(value = "properties", required = true) private IntegrationRuntime properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IntegrationRuntimeResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeResourceInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IntegrationRuntimeResourceInner. 
+ */ + public static IntegrationRuntimeResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeResourceInner deserializedIntegrationRuntimeResourceInner + = new IntegrationRuntimeResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedIntegrationRuntimeResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedIntegrationRuntimeResourceInner.properties = IntegrationRuntime.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedIntegrationRuntimeResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedIntegrationRuntimeResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedIntegrationRuntimeResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java index 1ac377c03764..e240496146e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/IntegrationRuntimeStatusResponseInner.java @@ -6,24 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Integration runtime status response. */ @Fluent -public final class IntegrationRuntimeStatusResponseInner { +public final class IntegrationRuntimeStatusResponseInner + implements JsonSerializable { /* * The integration runtime name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * Integration runtime properties. */ - @JsonProperty(value = "properties", required = true) private IntegrationRuntimeStatus properties; /** @@ -77,4 +80,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IntegrationRuntimeStatusResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeStatusResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeStatusResponseInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IntegrationRuntimeStatusResponseInner. 
+ */ + public static IntegrationRuntimeStatusResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeStatusResponseInner deserializedIntegrationRuntimeStatusResponseInner + = new IntegrationRuntimeStatusResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("properties".equals(fieldName)) { + deserializedIntegrationRuntimeStatusResponseInner.properties + = IntegrationRuntimeStatus.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedIntegrationRuntimeStatusResponseInner.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeStatusResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java index 09851276cc78..4e30d786a32b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JiraLinkedServiceTypeProperties.java @@ -6,63 +6,59 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Jira Service linked service properties. 
*/ @Fluent -public final class JiraLinkedServiceTypeProperties { +public final class JiraLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or host name of the Jira service. (e.g. jira.example.com) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting * through HTTPS, or 8080 if connecting through HTTP. */ - @JsonProperty(value = "port") private Object port; /* * The user name that you use to access Jira Service. */ - @JsonProperty(value = "username", required = true) private Object username; /* * The password corresponding to the user name that you provided in the username field. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -263,4 +259,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(JiraLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JiraLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JiraLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the JiraLinkedServiceTypeProperties. 
+ */ + public static JiraLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JiraLinkedServiceTypeProperties deserializedJiraLinkedServiceTypeProperties + = new JiraLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedJiraLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedJiraLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java index d176eb86e2af..ac6cecea64cd 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/JsonDatasetTypeProperties.java @@ -6,19 +6,22 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Json dataset properties. */ @Fluent -public final class JsonDatasetTypeProperties { +public final class JsonDatasetTypeProperties implements JsonSerializable { /* * The location of the json data storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* @@ -27,13 +30,11 @@ public final class JsonDatasetTypeProperties { * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "encodingName") private Object encodingName; /* * The data compression method used for the json dataset. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -127,4 +128,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(JsonDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("encodingName", this.encodingName); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the JsonDatasetTypeProperties. 
+ */ + public static JsonDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonDatasetTypeProperties deserializedJsonDatasetTypeProperties = new JsonDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedJsonDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("encodingName".equals(fieldName)) { + deserializedJsonDatasetTypeProperties.encodingName = reader.readUntyped(); + } else if ("compression".equals(fieldName)) { + deserializedJsonDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedJsonDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java index 11ed3be7be8f..599e35232179 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseLinkedServiceTypeProperties.java @@ -5,58 +5,56 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Microsoft Fabric 
LakeHouse linked service properties. */ @Fluent -public final class LakeHouseLinkedServiceTypeProperties { +public final class LakeHouseLinkedServiceTypeProperties + implements JsonSerializable { /* * The ID of Microsoft Fabric workspace. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "workspaceId") private Object workspaceId; /* * The ID of Microsoft Fabric LakeHouse artifact. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "artifactId") private Object artifactId; /* * The ID of the application used to authenticate against Microsoft Fabric LakeHouse. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The Key of the application used to authenticate against Microsoft Fabric LakeHouse. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -65,7 +63,6 @@ public final class LakeHouseLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. 
*/ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /** @@ -270,4 +267,64 @@ public void validate() { servicePrincipalCredential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("workspaceId", this.workspaceId); + jsonWriter.writeUntypedField("artifactId", this.artifactId); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseLinkedServiceTypeProperties. 
+ */ + public static LakeHouseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseLinkedServiceTypeProperties deserializedLakeHouseLinkedServiceTypeProperties + = new LakeHouseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("workspaceId".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.workspaceId = reader.readUntyped(); + } else if ("artifactId".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.artifactId = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedLakeHouseLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedLakeHouseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java index e716cea4d122..d688a930fa50 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LakeHouseTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Microsoft Fabric LakeHouse Table dataset properties. */ @Fluent -public final class LakeHouseTableDatasetTypeProperties { +public final class LakeHouseTableDatasetTypeProperties + implements JsonSerializable { /* * The schema name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +84,44 @@ public LakeHouseTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of LakeHouseTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseTableDatasetTypeProperties. + */ + public static LakeHouseTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseTableDatasetTypeProperties deserializedLakeHouseTableDatasetTypeProperties + = new LakeHouseTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedLakeHouseTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedLakeHouseTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedLakeHouseTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java index ce0f9658ed58..9f1060d8b111 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LicensedComponentSetupTypeProperties.java @@ -6,24 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Installation of licensed component setup type properties. */ @Fluent -public final class LicensedComponentSetupTypeProperties { +public final class LicensedComponentSetupTypeProperties + implements JsonSerializable { /* * The name of the 3rd party component. */ - @JsonProperty(value = "componentName", required = true) private String componentName; /* * The license key to activate the component. */ - @JsonProperty(value = "licenseKey") private SecretBase licenseKey; /** @@ -89,4 +92,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LicensedComponentSetupTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("componentName", this.componentName); + jsonWriter.writeJsonField("licenseKey", this.licenseKey); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LicensedComponentSetupTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LicensedComponentSetupTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LicensedComponentSetupTypeProperties. 
+ */ + public static LicensedComponentSetupTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LicensedComponentSetupTypeProperties deserializedLicensedComponentSetupTypeProperties + = new LicensedComponentSetupTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("componentName".equals(fieldName)) { + deserializedLicensedComponentSetupTypeProperties.componentName = reader.getString(); + } else if ("licenseKey".equals(fieldName)) { + deserializedLicensedComponentSetupTypeProperties.licenseKey = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedLicensedComponentSetupTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java index f62169a801d3..35f54170377b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LinkedServiceResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Linked service resource type. 
@@ -18,25 +21,21 @@ public final class LinkedServiceResourceInner extends SubResource { /* * Properties of linked service. */ - @JsonProperty(value = "properties", required = true) private LinkedService properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedServiceResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedServiceResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedServiceResourceInner if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedServiceResourceInner. 
+ */ + public static LinkedServiceResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedServiceResourceInner deserializedLinkedServiceResourceInner = new LinkedServiceResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedLinkedServiceResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedLinkedServiceResourceInner.properties = LinkedService.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedLinkedServiceResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedLinkedServiceResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedLinkedServiceResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedServiceResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java index f6039f349d8e..bda72ed5f63b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/LookupActivityTypeProperties.java @@ -6,32 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.CopySource; import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Lookup activity properties. */ @Fluent -public final class LookupActivityTypeProperties { +public final class LookupActivityTypeProperties implements JsonSerializable { /* * Dataset-specific source properties, same as copy activity source. */ - @JsonProperty(value = "source", required = true) private CopySource source; /* * Lookup activity dataset reference. */ - @JsonProperty(value = "dataset", required = true) private DatasetReference dataset; /* * Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "firstRowOnly") private Object firstRowOnly; /** @@ -125,4 +126,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LookupActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("source", this.source); + jsonWriter.writeJsonField("dataset", this.dataset); + jsonWriter.writeUntypedField("firstRowOnly", this.firstRowOnly); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LookupActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LookupActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LookupActivityTypeProperties. 
+ */ + public static LookupActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LookupActivityTypeProperties deserializedLookupActivityTypeProperties = new LookupActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("source".equals(fieldName)) { + deserializedLookupActivityTypeProperties.source = CopySource.fromJson(reader); + } else if ("dataset".equals(fieldName)) { + deserializedLookupActivityTypeProperties.dataset = DatasetReference.fromJson(reader); + } else if ("firstRowOnly".equals(fieldName)) { + deserializedLookupActivityTypeProperties.firstRowOnly = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedLookupActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java index 7e3f5593fd11..8515841630fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MagentoLinkedServiceTypeProperties.java @@ -6,50 +6,48 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Magento server linked service 
properties. */ @Fluent -public final class MagentoLinkedServiceTypeProperties { +public final class MagentoLinkedServiceTypeProperties implements JsonSerializable { /* * The URL of the Magento instance. (i.e. 192.168.222.110/magento3) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The access token from Magento. */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -203,4 +201,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MagentoLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MagentoLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MagentoLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MagentoLinkedServiceTypeProperties. 
+ */ + public static MagentoLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MagentoLinkedServiceTypeProperties deserializedMagentoLinkedServiceTypeProperties + = new MagentoLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("accessToken".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMagentoLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMagentoLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityTypeProperties.java index 54c550be19d9..bf21c7c3997b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIdentityTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Managed identity type properties. */ @Fluent -public final class ManagedIdentityTypeProperties { +public final class ManagedIdentityTypeProperties implements JsonSerializable { /* * The resource id of user assigned managed identity */ - @JsonProperty(value = "resourceId") private String resourceId; /** @@ -51,4 +54,41 @@ public ManagedIdentityTypeProperties withResourceId(String resourceId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("resourceId", this.resourceId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIdentityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIdentityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIdentityTypeProperties. 
+ */ + public static ManagedIdentityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIdentityTypeProperties deserializedManagedIdentityTypeProperties + = new ManagedIdentityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("resourceId".equals(fieldName)) { + deserializedManagedIdentityTypeProperties.resourceId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedIdentityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeStatusTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeStatusTypeProperties.java index 05d83fa7c1b9..fbaba202bf0f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeStatusTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeStatusTypeProperties.java @@ -5,10 +5,15 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Immutable; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeError; import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeNode; import com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeOperationResult; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; 
import java.time.OffsetDateTime; import java.util.List; @@ -16,29 +21,26 @@ * Managed integration runtime status type properties. */ @Immutable -public final class ManagedIntegrationRuntimeStatusTypeProperties { +public final class ManagedIntegrationRuntimeStatusTypeProperties + implements JsonSerializable { /* * The time at which the integration runtime was created, in ISO8601 format. */ - @JsonProperty(value = "createTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime createTime; /* * The list of nodes for managed integration runtime. */ - @JsonProperty(value = "nodes", access = JsonProperty.Access.WRITE_ONLY) private List nodes; /* * The errors that occurred on this integration runtime. */ - @JsonProperty(value = "otherErrors", access = JsonProperty.Access.WRITE_ONLY) private List otherErrors; /* * The last operation result that occurred on this integration runtime. */ - @JsonProperty(value = "lastOperation", access = JsonProperty.Access.WRITE_ONLY) private ManagedIntegrationRuntimeOperationResult lastOperation; /** @@ -99,4 +101,52 @@ public void validate() { lastOperation().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeStatusTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeStatusTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeStatusTypeProperties. 
+ */ + public static ManagedIntegrationRuntimeStatusTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeStatusTypeProperties deserializedManagedIntegrationRuntimeStatusTypeProperties + = new ManagedIntegrationRuntimeStatusTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("createTime".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatusTypeProperties.createTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("nodes".equals(fieldName)) { + List nodes + = reader.readArray(reader1 -> ManagedIntegrationRuntimeNode.fromJson(reader1)); + deserializedManagedIntegrationRuntimeStatusTypeProperties.nodes = nodes; + } else if ("otherErrors".equals(fieldName)) { + List otherErrors + = reader.readArray(reader1 -> ManagedIntegrationRuntimeError.fromJson(reader1)); + deserializedManagedIntegrationRuntimeStatusTypeProperties.otherErrors = otherErrors; + } else if ("lastOperation".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatusTypeProperties.lastOperation + = ManagedIntegrationRuntimeOperationResult.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedIntegrationRuntimeStatusTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java index a6be27991ce3..0cea5fe1ee96 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedIntegrationRuntimeTypeProperties.java @@ -5,32 +5,34 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeComputeProperties; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeSsisProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Managed integration runtime type properties. */ @Fluent -public final class ManagedIntegrationRuntimeTypeProperties { +public final class ManagedIntegrationRuntimeTypeProperties + implements JsonSerializable { /* * The compute resource for managed integration runtime. */ - @JsonProperty(value = "computeProperties") private IntegrationRuntimeComputeProperties computeProperties; /* * SSIS properties for managed integration runtime. 
*/ - @JsonProperty(value = "ssisProperties") private IntegrationRuntimeSsisProperties ssisProperties; /* * The name of virtual network to which Azure-SSIS integration runtime will join */ - @JsonProperty(value = "customerVirtualNetwork") private IntegrationRuntimeCustomerVirtualNetwork customerVirtualNetwork; /** @@ -119,4 +121,50 @@ public void validate() { customerVirtualNetwork().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("computeProperties", this.computeProperties); + jsonWriter.writeJsonField("ssisProperties", this.ssisProperties); + jsonWriter.writeJsonField("customerVirtualNetwork", this.customerVirtualNetwork); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeTypeProperties. 
+ */ + public static ManagedIntegrationRuntimeTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeTypeProperties deserializedManagedIntegrationRuntimeTypeProperties + = new ManagedIntegrationRuntimeTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("computeProperties".equals(fieldName)) { + deserializedManagedIntegrationRuntimeTypeProperties.computeProperties + = IntegrationRuntimeComputeProperties.fromJson(reader); + } else if ("ssisProperties".equals(fieldName)) { + deserializedManagedIntegrationRuntimeTypeProperties.ssisProperties + = IntegrationRuntimeSsisProperties.fromJson(reader); + } else if ("customerVirtualNetwork".equals(fieldName)) { + deserializedManagedIntegrationRuntimeTypeProperties.customerVirtualNetwork + = IntegrationRuntimeCustomerVirtualNetwork.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedIntegrationRuntimeTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java index 2049f083e239..367f0e1e06a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedPrivateEndpointResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Managed private endpoint resource type. @@ -18,25 +21,21 @@ public final class ManagedPrivateEndpointResourceInner extends SubResource { /* * Managed private endpoint properties. */ - @JsonProperty(value = "properties", required = true) private ManagedPrivateEndpoint properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedPrivateEndpointResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedPrivateEndpointResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedPrivateEndpointResourceInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedPrivateEndpointResourceInner. 
+ */ + public static ManagedPrivateEndpointResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedPrivateEndpointResourceInner deserializedManagedPrivateEndpointResourceInner + = new ManagedPrivateEndpointResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedManagedPrivateEndpointResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedManagedPrivateEndpointResourceInner.properties + = ManagedPrivateEndpoint.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedManagedPrivateEndpointResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedManagedPrivateEndpointResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedManagedPrivateEndpointResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedPrivateEndpointResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java index c212dc039271..0443e05698e0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ManagedVirtualNetworkResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Managed Virtual Network resource type. @@ -18,25 +21,21 @@ public final class ManagedVirtualNetworkResourceInner extends SubResource { /* * Managed Virtual Network properties. */ - @JsonProperty(value = "properties", required = true) private ManagedVirtualNetwork properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedVirtualNetworkResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedVirtualNetworkResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedVirtualNetworkResourceInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedVirtualNetworkResourceInner. 
+ */ + public static ManagedVirtualNetworkResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedVirtualNetworkResourceInner deserializedManagedVirtualNetworkResourceInner + = new ManagedVirtualNetworkResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedManagedVirtualNetworkResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedManagedVirtualNetworkResourceInner.properties = ManagedVirtualNetwork.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedManagedVirtualNetworkResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedManagedVirtualNetworkResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedManagedVirtualNetworkResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedVirtualNetworkResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MapperTableProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MapperTableProperties.java index 3ed9e3fea8ca..5eb748a5395f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MapperTableProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MapperTableProperties.java @@ -5,26 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; 
+import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Properties for a CDC table. */ @Fluent -public final class MapperTableProperties { +public final class MapperTableProperties implements JsonSerializable { /* * List of columns for the source table. */ - @JsonProperty(value = "schema") private List schema; /* * List of name/value pairs for connection properties. */ - @JsonProperty(value = "dslConnectorProperties") private List dslConnectorProperties; /** @@ -86,4 +88,47 @@ public void validate() { dslConnectorProperties().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("schema", this.schema, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("dslConnectorProperties", this.dslConnectorProperties, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperTableProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperTableProperties if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperTableProperties. 
+ */ + public static MapperTableProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperTableProperties deserializedMapperTableProperties = new MapperTableProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + List schema = reader.readArray(reader1 -> MapperTableSchema.fromJson(reader1)); + deserializedMapperTableProperties.schema = schema; + } else if ("dslConnectorProperties".equals(fieldName)) { + List dslConnectorProperties + = reader.readArray(reader1 -> MapperDslConnectorProperties.fromJson(reader1)); + deserializedMapperTableProperties.dslConnectorProperties = dslConnectorProperties; + } else { + reader.skipChildren(); + } + } + + return deserializedMapperTableProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MappingDataFlowTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MappingDataFlowTypeProperties.java index fd1629c49c2d..0223c57a3079 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MappingDataFlowTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MappingDataFlowTypeProperties.java @@ -5,45 +5,44 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DataFlowSink; import com.azure.resourcemanager.datafactory.models.DataFlowSource; import 
com.azure.resourcemanager.datafactory.models.Transformation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Mapping data flow type properties. */ @Fluent -public final class MappingDataFlowTypeProperties { +public final class MappingDataFlowTypeProperties implements JsonSerializable { /* * List of sources in data flow. */ - @JsonProperty(value = "sources") private List sources; /* * List of sinks in data flow. */ - @JsonProperty(value = "sinks") private List sinks; /* * List of transformations in data flow. */ - @JsonProperty(value = "transformations") private List transformations; /* * DataFlow script. */ - @JsonProperty(value = "script") private String script; /* * Data flow script lines. */ - @JsonProperty(value = "scriptLines") private List scriptLines; /** @@ -168,4 +167,59 @@ public void validate() { transformations().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sources", this.sources, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("sinks", this.sinks, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("transformations", this.transformations, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("script", this.script); + jsonWriter.writeArrayField("scriptLines", this.scriptLines, (writer, element) -> writer.writeString(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MappingDataFlowTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MappingDataFlowTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MappingDataFlowTypeProperties. 
+ */ + public static MappingDataFlowTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MappingDataFlowTypeProperties deserializedMappingDataFlowTypeProperties + = new MappingDataFlowTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sources".equals(fieldName)) { + List sources = reader.readArray(reader1 -> DataFlowSource.fromJson(reader1)); + deserializedMappingDataFlowTypeProperties.sources = sources; + } else if ("sinks".equals(fieldName)) { + List sinks = reader.readArray(reader1 -> DataFlowSink.fromJson(reader1)); + deserializedMappingDataFlowTypeProperties.sinks = sinks; + } else if ("transformations".equals(fieldName)) { + List transformations + = reader.readArray(reader1 -> Transformation.fromJson(reader1)); + deserializedMappingDataFlowTypeProperties.transformations = transformations; + } else if ("script".equals(fieldName)) { + deserializedMappingDataFlowTypeProperties.script = reader.getString(); + } else if ("scriptLines".equals(fieldName)) { + List scriptLines = reader.readArray(reader1 -> reader1.getString()); + deserializedMappingDataFlowTypeProperties.scriptLines = scriptLines; + } else { + reader.skipChildren(); + } + } + + return deserializedMappingDataFlowTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java index a3ed231bd69b..febd8c48bdd0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MariaDBLinkedServiceTypeProperties.java @@ -5,62 +5,58 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * MariaDB server linked service properties. */ @Fluent -public final class MariaDBLinkedServiceTypeProperties { +public final class MariaDBLinkedServiceTypeProperties implements JsonSerializable { /* * The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support * connection string and property bag, V2 can only support connection string. */ - @JsonProperty(value = "driverVersion") private Object driverVersion; /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * Server name for connection. Type: string. */ - @JsonProperty(value = "server") private Object server; /* * The port for the connection. Type: integer. */ - @JsonProperty(value = "port") private Object port; /* * Username for authentication. Type: string. */ - @JsonProperty(value = "username") private Object username; /* * Database name for connection. Type: string. */ - @JsonProperty(value = "database") private Object database; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -245,4 +241,63 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("driverVersion", this.driverVersion); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MariaDBLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MariaDBLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MariaDBLinkedServiceTypeProperties. 
+ */ + public static MariaDBLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MariaDBLinkedServiceTypeProperties deserializedMariaDBLinkedServiceTypeProperties + = new MariaDBLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("driverVersion".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.driverVersion = reader.readUntyped(); + } else if ("connectionString".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("server".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMariaDBLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMariaDBLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java index 6fd72942db43..dd34302872c2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MarketoLinkedServiceTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Marketo server linked service properties. */ @Fluent -public final class MarketoLinkedServiceTypeProperties { +public final class MarketoLinkedServiceTypeProperties implements JsonSerializable { /* * The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The client Id of your Marketo service. */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * The client secret of your Marketo service. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
*/ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -234,4 +231,60 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MarketoLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MarketoLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MarketoLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MarketoLinkedServiceTypeProperties. 
+ */ + public static MarketoLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MarketoLinkedServiceTypeProperties deserializedMarketoLinkedServiceTypeProperties + = new MarketoLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMarketoLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMarketoLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java index c9df54d7f9dd..c1969f4c473e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessLinkedServiceTypeProperties.java @@ -6,51 +6,50 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Microsoft Access linked service properties. */ @Fluent -public final class MicrosoftAccessLinkedServiceTypeProperties { +public final class MicrosoftAccessLinkedServiceTypeProperties + implements JsonSerializable { /* * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous * and Basic. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The access credential portion of the connection string specified in driver-specific property-value format. */ - @JsonProperty(value = "credential") private SecretBase credential; /* * User name for Basic authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password for Basic authentication. 
*/ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -211,4 +210,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MicrosoftAccessLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessLinkedServiceTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MicrosoftAccessLinkedServiceTypeProperties. 
+ */ + public static MicrosoftAccessLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessLinkedServiceTypeProperties deserializedMicrosoftAccessLinkedServiceTypeProperties + = new MicrosoftAccessLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.credential = SecretBase.fromJson(reader); + } else if ("userName".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMicrosoftAccessLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMicrosoftAccessLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java index ebb1b7bd1acb..a14e425b9be8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MicrosoftAccessTableDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Microsoft Access table dataset properties. */ @Fluent -public final class MicrosoftAccessTableDatasetTypeProperties { +public final class MicrosoftAccessTableDatasetTypeProperties + implements JsonSerializable { /* * The Microsoft Access table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +55,41 @@ public MicrosoftAccessTableDatasetTypeProperties withTableName(Object tableName) */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessTableDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MicrosoftAccessTableDatasetTypeProperties. 
+ */ + public static MicrosoftAccessTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessTableDatasetTypeProperties deserializedMicrosoftAccessTableDatasetTypeProperties + = new MicrosoftAccessTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedMicrosoftAccessTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMicrosoftAccessTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java index 03eb39c8e05c..23c67f477e6a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasCollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * MongoDB Atlas database dataset properties. 
*/ @Fluent -public final class MongoDbAtlasCollectionDatasetTypeProperties { +public final class MongoDbAtlasCollectionDatasetTypeProperties + implements JsonSerializable { /* * The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collection", required = true) private Object collection; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbAtlasCollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collection", this.collection); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasCollectionDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasCollectionDatasetTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbAtlasCollectionDatasetTypeProperties. 
+ */ + public static MongoDbAtlasCollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasCollectionDatasetTypeProperties deserializedMongoDbAtlasCollectionDatasetTypeProperties + = new MongoDbAtlasCollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collection".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDatasetTypeProperties.collection = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbAtlasCollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java index 82cd78e28ba5..34870c6d2d2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbAtlasLinkedServiceTypeProperties.java @@ -6,32 +6,34 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * MongoDB Atlas linked service properties. */ @Fluent -public final class MongoDbAtlasLinkedServiceTypeProperties { +public final class MongoDbAtlasLinkedServiceTypeProperties + implements JsonSerializable { /* * The MongoDB Atlas connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, * SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "database", required = true) private Object database; /* * The driver version that you want to choose. Allowed value are v1 and v2. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "driverVersion") private Object driverVersion; /** @@ -125,4 +127,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbAtlasLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("driverVersion", this.driverVersion); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbAtlasLinkedServiceTypeProperties. 
+ */ + public static MongoDbAtlasLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasLinkedServiceTypeProperties deserializedMongoDbAtlasLinkedServiceTypeProperties + = new MongoDbAtlasLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedMongoDbAtlasLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedMongoDbAtlasLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("driverVersion".equals(fieldName)) { + deserializedMongoDbAtlasLinkedServiceTypeProperties.driverVersion = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbAtlasLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java index d3378c415e90..58120f8e8dc9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbCollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import 
java.io.IOException; /** * MongoDB database dataset properties. */ @Fluent -public final class MongoDbCollectionDatasetTypeProperties { +public final class MongoDbCollectionDatasetTypeProperties + implements JsonSerializable { /* * The table name of the MongoDB database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collectionName", required = true) private Object collectionName; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbCollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collectionName", this.collectionName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbCollectionDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbCollectionDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbCollectionDatasetTypeProperties. 
+ */ + public static MongoDbCollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbCollectionDatasetTypeProperties deserializedMongoDbCollectionDatasetTypeProperties + = new MongoDbCollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collectionName".equals(fieldName)) { + deserializedMongoDbCollectionDatasetTypeProperties.collectionName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbCollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java index bd7cb6e0ebdb..08bd9fa1d35f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbLinkedServiceTypeProperties.java @@ -6,77 +6,71 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.MongoDbAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * MongoDB linked service properties. 
*/ @Fluent -public final class MongoDbLinkedServiceTypeProperties { +public final class MongoDbLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server", required = true) private Object server; /* * The authentication type to be used to connect to the MongoDB database. */ - @JsonProperty(value = "authenticationType") private MongoDbAuthenticationType authenticationType; /* * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "databaseName", required = true) private Object databaseName; /* * Username for authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * Database to verify the username and password. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "authSource") private Object authSource; /* * The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. * Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -321,4 +315,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("databaseName", this.databaseName); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("authSource", this.authSource); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbLinkedServiceTypeProperties. 
+ */ + public static MongoDbLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbLinkedServiceTypeProperties deserializedMongoDbLinkedServiceTypeProperties + = new MongoDbLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("databaseName".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.databaseName = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.authenticationType + = MongoDbAuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("authSource".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.authSource = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("enableSsl".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMongoDbLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java index 1077f38da50d..137c4d2a25b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2CollectionDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * MongoDB database dataset properties. */ @Fluent -public final class MongoDbV2CollectionDatasetTypeProperties { +public final class MongoDbV2CollectionDatasetTypeProperties + implements JsonSerializable { /* * The collection name of the MongoDB database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "collection", required = true) private Object collection; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbV2CollectionDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("collection", this.collection); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2CollectionDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of MongoDbV2CollectionDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbV2CollectionDatasetTypeProperties. + */ + public static MongoDbV2CollectionDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2CollectionDatasetTypeProperties deserializedMongoDbV2CollectionDatasetTypeProperties + = new MongoDbV2CollectionDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("collection".equals(fieldName)) { + deserializedMongoDbV2CollectionDatasetTypeProperties.collection = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbV2CollectionDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java index 68cba0d4b2ea..bb4fedcd2bbc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MongoDbV2LinkedServiceTypeProperties.java @@ -6,24 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * MongoDB linked service properties. */ @Fluent -public final class MongoDbV2LinkedServiceTypeProperties { +public final class MongoDbV2LinkedServiceTypeProperties + implements JsonSerializable { /* * The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, * SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database", required = true) private Object database; /** @@ -95,4 +98,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbV2LinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("database", this.database); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbV2LinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbV2LinkedServiceTypeProperties. 
+ */ + public static MongoDbV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2LinkedServiceTypeProperties deserializedMongoDbV2LinkedServiceTypeProperties + = new MongoDbV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedMongoDbV2LinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedMongoDbV2LinkedServiceTypeProperties.database = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMongoDbV2LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java index 3905807cb639..c623d9ccaf2c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlLinkedServiceTypeProperties.java @@ -5,74 +5,68 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * MySQL linked service properties. 
*/ @Fluent -public final class MySqlLinkedServiceTypeProperties { +public final class MySqlLinkedServiceTypeProperties implements JsonSerializable { /* * The version of the MySQL driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support * connection string and property bag, V2 can only support connection string. */ - @JsonProperty(value = "driverVersion") private Object driverVersion; /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * Server name for connection. Type: string. */ - @JsonProperty(value = "server") private Object server; /* * The port for the connection. Type: integer. */ - @JsonProperty(value = "port") private Object port; /* * Username for authentication. Type: string. */ - @JsonProperty(value = "username") private Object username; /* * Database name for connection. Type: string. */ - @JsonProperty(value = "database") private Object database; /* * SSL mode for connection. Type: integer. 0: disable, 1: prefer, 2: require, 3: verify-ca, 4: verify-full. */ - @JsonProperty(value = "sslMode") private Object sslMode; /* * Use system trust store for connection. Type: integer. 0: enable, 1: disable. */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -301,4 +295,69 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("driverVersion", this.driverVersion); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("sslMode", this.sslMode); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MySqlLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MySqlLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MySqlLinkedServiceTypeProperties. 
+ */ + public static MySqlLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MySqlLinkedServiceTypeProperties deserializedMySqlLinkedServiceTypeProperties + = new MySqlLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("driverVersion".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.driverVersion = reader.readUntyped(); + } else if ("connectionString".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("server".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("sslMode".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.sslMode = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedMySqlLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMySqlLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlTableDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlTableDatasetTypeProperties.java index 1f703607b618..b4c76ac43aac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/MySqlTableDatasetTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * MySql table dataset properties. */ @Fluent -public final class MySqlTableDatasetTypeProperties { +public final class MySqlTableDatasetTypeProperties implements JsonSerializable { /* * The MySQL table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +54,41 @@ public MySqlTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MySqlTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MySqlTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MySqlTableDatasetTypeProperties. 
+ */ + public static MySqlTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MySqlTableDatasetTypeProperties deserializedMySqlTableDatasetTypeProperties + = new MySqlTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedMySqlTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMySqlTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java index 11544642ff36..7726af5367fe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaLinkedServiceTypeProperties.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Netezza linked service properties. */ @Fluent -public final class NetezzaLinkedServiceTypeProperties { +public final class NetezzaLinkedServiceTypeProperties implements JsonSerializable { /* * An ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "pwd") private AzureKeyVaultSecretReference pwd; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +113,47 @@ public void validate() { pwd().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("pwd", this.pwd); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the NetezzaLinkedServiceTypeProperties. 
+ */ + public static NetezzaLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaLinkedServiceTypeProperties deserializedNetezzaLinkedServiceTypeProperties + = new NetezzaLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedNetezzaLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("pwd".equals(fieldName)) { + deserializedNetezzaLinkedServiceTypeProperties.pwd = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedNetezzaLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedNetezzaLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaTableDatasetTypeProperties.java index 9c42b6c7c594..ada67e37b9c9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/NetezzaTableDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Netezza 
dataset properties. */ @Fluent -public final class NetezzaTableDatasetTypeProperties { +public final class NetezzaTableDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Netezza. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Netezza. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,47 @@ public NetezzaTableDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the NetezzaTableDatasetTypeProperties. 
+ */ + public static NetezzaTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaTableDatasetTypeProperties deserializedNetezzaTableDatasetTypeProperties + = new NetezzaTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedNetezzaTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedNetezzaTableDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedNetezzaTableDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedNetezzaTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java index c8f09ec0de4c..bf1186a6db08 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataLinkedServiceTypeProperties.java @@ -6,59 +6,56 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ODataAadServicePrincipalCredentialType; import com.azure.resourcemanager.datafactory.models.ODataAuthenticationType; import 
com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * OData linked service properties. */ @Fluent -public final class ODataLinkedServiceTypeProperties { +public final class ODataLinkedServiceTypeProperties implements JsonSerializable { /* * The URL of the OData service endpoint. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * Type of authentication used to connect to the OData service. */ - @JsonProperty(value = "authenticationType") private ODataAuthenticationType authenticationType; /* * User name of the OData service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password of the OData service. */ - @JsonProperty(value = "password") private SecretBase password; /* * The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value * should be string type). */ - @JsonProperty(value = "authHeaders") private Object authHeaders; /* * Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* @@ -66,48 +63,41 @@ public final class ODataLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * Specify the resource you are requesting authorization to use Directory. 
Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "aadResourceId") private Object aadResourceId; /* * Specify the credential type (key or cert) is used for service principal. */ - @JsonProperty(value = "aadServicePrincipalCredentialType") private ODataAadServicePrincipalCredentialType aadServicePrincipalCredentialType; /* * Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalEmbeddedCert") private SecretBase servicePrincipalEmbeddedCert; /* * Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal * authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalEmbeddedCertPassword") private SecretBase servicePrincipalEmbeddedCertPassword; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -448,4 +438,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ODataLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("authHeaders", this.authHeaders); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeUntypedField("aadResourceId", this.aadResourceId); + jsonWriter.writeStringField("aadServicePrincipalCredentialType", + this.aadServicePrincipalCredentialType == null ? null : this.aadServicePrincipalCredentialType.toString()); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeJsonField("servicePrincipalEmbeddedCert", this.servicePrincipalEmbeddedCert); + jsonWriter.writeJsonField("servicePrincipalEmbeddedCertPassword", this.servicePrincipalEmbeddedCertPassword); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ODataLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ODataLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ODataLinkedServiceTypeProperties. 
+ */ + public static ODataLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ODataLinkedServiceTypeProperties deserializedODataLinkedServiceTypeProperties + = new ODataLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.authenticationType + = ODataAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("authHeaders".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.authHeaders = reader.readUntyped(); + } else if ("tenant".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("aadResourceId".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.aadResourceId = reader.readUntyped(); + } else if ("aadServicePrincipalCredentialType".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.aadServicePrincipalCredentialType + = ODataAadServicePrincipalCredentialType.fromString(reader.getString()); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } 
else if ("servicePrincipalEmbeddedCert".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.servicePrincipalEmbeddedCert + = SecretBase.fromJson(reader); + } else if ("servicePrincipalEmbeddedCertPassword".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.servicePrincipalEmbeddedCertPassword + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedODataLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedODataLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataResourceDatasetTypeProperties.java index cf445f03fbde..1a798bd92d44 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ODataResourceDatasetTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * OData dataset properties. */ @Fluent -public final class ODataResourceDatasetTypeProperties { +public final class ODataResourceDatasetTypeProperties implements JsonSerializable { /* * The OData resource path. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "path") private Object path; /** @@ -51,4 +54,41 @@ public ODataResourceDatasetTypeProperties withPath(Object path) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ODataResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ODataResourceDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ODataResourceDatasetTypeProperties. + */ + public static ODataResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ODataResourceDatasetTypeProperties deserializedODataResourceDatasetTypeProperties + = new ODataResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("path".equals(fieldName)) { + deserializedODataResourceDatasetTypeProperties.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedODataResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java index c6ee353906bb..3d02d96dba49 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcLinkedServiceTypeProperties.java @@ -6,51 +6,49 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * ODBC linked service properties. */ @Fluent -public final class OdbcLinkedServiceTypeProperties { +public final class OdbcLinkedServiceTypeProperties implements JsonSerializable { /* * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: * string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType string. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The access credential portion of the connection string specified in driver-specific property-value format. */ - @JsonProperty(value = "credential") private SecretBase credential; /* * User name for Basic authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password for Basic authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -211,4 +209,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OdbcLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OdbcLinkedServiceTypeProperties. 
+ */ + public static OdbcLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcLinkedServiceTypeProperties deserializedOdbcLinkedServiceTypeProperties + = new OdbcLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.credential = SecretBase.fromJson(reader); + } else if ("userName".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedOdbcLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOdbcLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcTableDatasetTypeProperties.java index b21d8632be47..97701bac19df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OdbcTableDatasetTypeProperties.java @@ -5,17 
+5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * ODBC table dataset properties. */ @Fluent -public final class OdbcTableDatasetTypeProperties { +public final class OdbcTableDatasetTypeProperties implements JsonSerializable { /* * The ODBC table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +54,41 @@ public OdbcTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OdbcTableDatasetTypeProperties. 
+ */ + public static OdbcTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcTableDatasetTypeProperties deserializedOdbcTableDatasetTypeProperties + = new OdbcTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedOdbcTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedOdbcTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java index da50c6f48f46..b21d647b6ef4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365DatasetTypeProperties.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Office365 dataset properties. */ @Fluent -public final class Office365DatasetTypeProperties { +public final class Office365DatasetTypeProperties implements JsonSerializable { /* * Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "tableName", required = true) private Object tableName; /* * A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "predicate") private Object predicate; /** @@ -90,4 +92,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Office365DatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("predicate", this.predicate); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Office365DatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Office365DatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Office365DatasetTypeProperties. 
+ */ + public static Office365DatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Office365DatasetTypeProperties deserializedOffice365DatasetTypeProperties + = new Office365DatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedOffice365DatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("predicate".equals(fieldName)) { + deserializedOffice365DatasetTypeProperties.predicate = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedOffice365DatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java index 0d1fa7b5dde9..abb36f2eb687 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Office365LinkedServiceTypeProperties.java @@ -6,44 +6,44 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Office365 linked service properties. 
*/ @Fluent -public final class Office365LinkedServiceTypeProperties { +public final class Office365LinkedServiceTypeProperties + implements JsonSerializable { /* * Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "office365TenantId", required = true) private Object office365TenantId; /* * Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "servicePrincipalTenantId", required = true) private Object servicePrincipalTenantId; /* * Specify the application's client ID. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId", required = true) private Object servicePrincipalId; /* * Specify the application's key. */ - @JsonProperty(value = "servicePrincipalKey", required = true) private SecretBase servicePrincipalKey; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -191,4 +191,54 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Office365LinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("office365TenantId", this.office365TenantId); + jsonWriter.writeUntypedField("servicePrincipalTenantId", this.servicePrincipalTenantId); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Office365LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Office365LinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Office365LinkedServiceTypeProperties. 
+ */ + public static Office365LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Office365LinkedServiceTypeProperties deserializedOffice365LinkedServiceTypeProperties + = new Office365LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("office365TenantId".equals(fieldName)) { + deserializedOffice365LinkedServiceTypeProperties.office365TenantId = reader.readUntyped(); + } else if ("servicePrincipalTenantId".equals(fieldName)) { + deserializedOffice365LinkedServiceTypeProperties.servicePrincipalTenantId = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedOffice365LinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedOffice365LinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedOffice365LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOffice365LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationInner.java index 99757b731906..64b62d681381 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationInner.java @@ -5,37 +5,37 @@ package com.azure.resourcemanager.datafactory.fluent.models; import 
com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.OperationDisplay; import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Azure Data Factory API operation definition. */ @Fluent -public final class OperationInner { +public final class OperationInner implements JsonSerializable { /* * Operation name: {provider}/{resource}/{operation} */ - @JsonProperty(value = "name") private String name; /* * The intended executor of the operation. */ - @JsonProperty(value = "origin") private String origin; /* * Metadata associated with the operation. */ - @JsonProperty(value = "display") private OperationDisplay display; /* * Additional details about the operation. */ - @JsonProperty(value = "properties") private OperationProperties innerProperties; /** @@ -149,4 +149,49 @@ public void validate() { innerProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("origin", this.origin); + jsonWriter.writeJsonField("display", this.display); + jsonWriter.writeJsonField("properties", this.innerProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationInner if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationInner. 
+ */ + public static OperationInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationInner deserializedOperationInner = new OperationInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedOperationInner.name = reader.getString(); + } else if ("origin".equals(fieldName)) { + deserializedOperationInner.origin = reader.getString(); + } else if ("display".equals(fieldName)) { + deserializedOperationInner.display = OperationDisplay.fromJson(reader); + } else if ("properties".equals(fieldName)) { + deserializedOperationInner.innerProperties = OperationProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationProperties.java index 709dfac44ef1..39095065751f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OperationProperties.java @@ -5,18 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Additional details about an operation. 
*/ @Fluent -public final class OperationProperties { +public final class OperationProperties implements JsonSerializable { /* * Details about a service operation. */ - @JsonProperty(value = "serviceSpecification") private OperationServiceSpecification serviceSpecification; /** @@ -55,4 +58,41 @@ public void validate() { serviceSpecification().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("serviceSpecification", this.serviceSpecification); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationProperties if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationProperties. + */ + public static OperationProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationProperties deserializedOperationProperties = new OperationProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serviceSpecification".equals(fieldName)) { + deserializedOperationProperties.serviceSpecification + = OperationServiceSpecification.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java index d8c97cd23307..6a5d5c84230c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleCloudStorageLinkedServiceTypeProperties.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Oracle Cloud Storage linked service properties. */ @Fluent -public final class OracleCloudStorageLinkedServiceTypeProperties { +public final class OracleCloudStorageLinkedServiceTypeProperties + implements JsonSerializable { /* * The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "accessKeyId") private Object accessKeyId; /* * The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. */ - @JsonProperty(value = "secretAccessKey") private SecretBase secretAccessKey; /* @@ -31,14 +34,12 @@ public final class OracleCloudStorageLinkedServiceTypeProperties { * property; change it only if you want to try a different service endpoint or want to switch between https and * http. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "serviceUrl") private Object serviceUrl; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -147,4 +148,51 @@ public void validate() { secretAccessKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accessKeyId", this.accessKeyId); + jsonWriter.writeJsonField("secretAccessKey", this.secretAccessKey); + jsonWriter.writeUntypedField("serviceUrl", this.serviceUrl); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleCloudStorageLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleCloudStorageLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OracleCloudStorageLinkedServiceTypeProperties. 
+ */ + public static OracleCloudStorageLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleCloudStorageLinkedServiceTypeProperties deserializedOracleCloudStorageLinkedServiceTypeProperties + = new OracleCloudStorageLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accessKeyId".equals(fieldName)) { + deserializedOracleCloudStorageLinkedServiceTypeProperties.accessKeyId = reader.readUntyped(); + } else if ("secretAccessKey".equals(fieldName)) { + deserializedOracleCloudStorageLinkedServiceTypeProperties.secretAccessKey + = SecretBase.fromJson(reader); + } else if ("serviceUrl".equals(fieldName)) { + deserializedOracleCloudStorageLinkedServiceTypeProperties.serviceUrl = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedOracleCloudStorageLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOracleCloudStorageLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java index 6f0651c9c565..5963cc23f860 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleLinkedServiceTypeProperties.java @@ -6,31 +6,32 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Oracle database linked service properties. */ @Fluent -public final class OracleLinkedServiceTypeProperties { +public final class OracleLinkedServiceTypeProperties implements JsonSerializable { /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -120,4 +121,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OracleLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleLinkedServiceTypeProperties. + */ + public static OracleLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleLinkedServiceTypeProperties deserializedOracleLinkedServiceTypeProperties + = new OracleLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedOracleLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedOracleLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedOracleLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOracleLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java index 159a7c8f8551..bf5f4e6be265 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleServiceCloudLinkedServiceTypeProperties.java @@ -6,58 +6,56 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Oracle Service Cloud linked service properties. */ @Fluent -public final class OracleServiceCloudLinkedServiceTypeProperties { +public final class OracleServiceCloudLinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of the Oracle Service Cloud instance. */ - @JsonProperty(value = "host", required = true) private Object host; /* * The user name that you use to access Oracle Service Cloud server. */ - @JsonProperty(value = "username", required = true) private Object username; /* * The password corresponding to the user name that you provided in the username key. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -242,4 +240,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OracleServiceCloudLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleServiceCloudLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleServiceCloudLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleServiceCloudLinkedServiceTypeProperties. 
+ */ + public static OracleServiceCloudLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleServiceCloudLinkedServiceTypeProperties deserializedOracleServiceCloudLinkedServiceTypeProperties + = new OracleServiceCloudLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.useEncryptedEndpoints + = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.useHostVerification + = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.usePeerVerification + = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedOracleServiceCloudLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOracleServiceCloudLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleTableDatasetTypeProperties.java index 993e07f34fdd..c090c29731bd 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OracleTableDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * On-premises Oracle dataset properties. */ @Fluent -public final class OracleTableDatasetTypeProperties { +public final class OracleTableDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -109,4 +110,47 @@ public OracleTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of OracleTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OracleTableDatasetTypeProperties. + */ + public static OracleTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleTableDatasetTypeProperties deserializedOracleTableDatasetTypeProperties + = new OracleTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedOracleTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedOracleTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedOracleTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedOracleTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java index ac65b24d0270..5eb2bb9b4954 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/OrcDatasetTypeProperties.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * ORC dataset properties. */ @Fluent -public final class OrcDatasetTypeProperties { +public final class OrcDatasetTypeProperties implements JsonSerializable { /* * The location of the ORC data storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The data orcCompressionCodec. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "orcCompressionCodec") private Object orcCompressionCodec; /** @@ -90,4 +92,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OrcDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("orcCompressionCodec", this.orcCompressionCodec); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OrcDatasetTypeProperties. 
+ */ + public static OrcDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcDatasetTypeProperties deserializedOrcDatasetTypeProperties = new OrcDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedOrcDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("orcCompressionCodec".equals(fieldName)) { + deserializedOrcDatasetTypeProperties.orcCompressionCodec = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedOrcDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java index c1c89660dc77..c4f245fc2abc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ParquetDatasetTypeProperties.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Parquet dataset properties. 
*/ @Fluent -public final class ParquetDatasetTypeProperties { +public final class ParquetDatasetTypeProperties implements JsonSerializable { /* * The location of the parquet storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* * The data compressionCodec. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "compressionCodec") private Object compressionCodec; /** @@ -90,4 +92,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ParquetDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("compressionCodec", this.compressionCodec); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ParquetDatasetTypeProperties. 
+ */ + public static ParquetDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetDatasetTypeProperties deserializedParquetDatasetTypeProperties = new ParquetDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedParquetDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("compressionCodec".equals(fieldName)) { + deserializedParquetDatasetTypeProperties.compressionCodec = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedParquetDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java index afc74a312626..ed2faa0ff9ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PaypalLinkedServiceTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Paypal Service linked service properties. 
*/ @Fluent -public final class PaypalLinkedServiceTypeProperties { +public final class PaypalLinkedServiceTypeProperties implements JsonSerializable { /* * The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The client ID associated with your PayPal application. */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * The client secret associated with your PayPal application. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -234,4 +231,60 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PaypalLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PaypalLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PaypalLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PaypalLinkedServiceTypeProperties. 
+ */ + public static PaypalLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PaypalLinkedServiceTypeProperties deserializedPaypalLinkedServiceTypeProperties + = new PaypalLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedPaypalLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPaypalLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixDatasetTypeProperties.java index bc81a0ca2811..700c4a80833c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixDatasetTypeProperties.java 
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Phoenix Dataset Properties. */ @Fluent -public final class PhoenixDatasetTypeProperties { +public final class PhoenixDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Phoenix. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Phoenix. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public PhoenixDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PhoenixDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PhoenixDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PhoenixDatasetTypeProperties. 
+ */ + public static PhoenixDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PhoenixDatasetTypeProperties deserializedPhoenixDatasetTypeProperties = new PhoenixDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedPhoenixDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedPhoenixDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedPhoenixDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedPhoenixDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java index 1ff647dd4b38..c133f3b3616b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PhoenixLinkedServiceTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PhoenixAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
java.io.IOException; /** * Phoenix server linked service properties. */ @Fluent -public final class PhoenixLinkedServiceTypeProperties { +public final class PhoenixLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. */ - @JsonProperty(value = "port") private Object port; /* * The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value * is hbasephoenix if using WindowsAzureHDInsightService. */ - @JsonProperty(value = "httpPath") private Object httpPath; /* * The authentication mechanism used to connect to the Phoenix server. */ - @JsonProperty(value = "authenticationType", required = true) private PhoenixAuthenticationType authenticationType; /* * The user name used to connect to the Phoenix server. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -63,34 +60,29 @@ public final class PhoenixLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false. 
*/ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. */ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -379,4 +371,77 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PhoenixLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("httpPath", this.httpPath); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PhoenixLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PhoenixLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PhoenixLinkedServiceTypeProperties. 
+ */ + public static PhoenixLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PhoenixLinkedServiceTypeProperties deserializedPhoenixLinkedServiceTypeProperties + = new PhoenixLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.authenticationType + = PhoenixAuthenticationType.fromString(reader.getString()); + } else if ("port".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("httpPath".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.httpPath = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("enableSsl".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("allowHostNameCNMismatch".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedPhoenixLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) 
{ + deserializedPhoenixLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPhoenixLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Pipeline.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Pipeline.java index 89f8b315501f..442f5a63fdef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Pipeline.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/Pipeline.java @@ -5,13 +5,16 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.PipelineFolder; import com.azure.resourcemanager.datafactory.models.PipelinePolicy; import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -19,62 +22,50 @@ * A data factory pipeline. */ @Fluent -public final class Pipeline { +public final class Pipeline implements JsonSerializable { /* * The description of the pipeline. */ - @JsonProperty(value = "description") private String description; /* * List of activities in pipeline. */ - @JsonProperty(value = "activities") private List activities; /* * List of parameters for pipeline. 
*/ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * List of variables for pipeline. */ - @JsonProperty(value = "variables") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map variables; /* * The max number of concurrent runs for the pipeline. */ - @JsonProperty(value = "concurrency") private Integer concurrency; /* * List of tags that can be used for describing the Pipeline. */ - @JsonProperty(value = "annotations") private List annotations; /* * Dimensions emitted by Pipeline. */ - @JsonProperty(value = "runDimensions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map runDimensions; /* * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */ - @JsonProperty(value = "folder") private PipelineFolder folder; /* * Pipeline Policy. */ - @JsonProperty(value = "policy") private PipelinePolicy policy; /** @@ -295,4 +286,72 @@ public void validate() { policy().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeArrayField("activities", this.activities, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("variables", this.variables, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeNumberField("concurrency", this.concurrency); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("runDimensions", this.runDimensions, + (writer, element) -> writer.writeUntyped(element)); + 
jsonWriter.writeJsonField("folder", this.folder); + jsonWriter.writeJsonField("policy", this.policy); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Pipeline from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Pipeline if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the Pipeline. + */ + public static Pipeline fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Pipeline deserializedPipeline = new Pipeline(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedPipeline.description = reader.getString(); + } else if ("activities".equals(fieldName)) { + List activities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedPipeline.activities = activities; + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPipeline.parameters = parameters; + } else if ("variables".equals(fieldName)) { + Map variables + = reader.readMap(reader1 -> VariableSpecification.fromJson(reader1)); + deserializedPipeline.variables = variables; + } else if ("concurrency".equals(fieldName)) { + deserializedPipeline.concurrency = reader.getNullable(JsonReader::getInt); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPipeline.annotations = annotations; + } else if ("runDimensions".equals(fieldName)) { + Map runDimensions = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedPipeline.runDimensions = runDimensions; + } else if ("folder".equals(fieldName)) { + deserializedPipeline.folder = PipelineFolder.fromJson(reader); + } else if 
("policy".equals(fieldName)) { + deserializedPipeline.policy = PipelinePolicy.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedPipeline; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java index 5da9e25e172a..33267d2c9f75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineResourceInner.java @@ -7,15 +7,15 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.ParameterSpecification; import com.azure.resourcemanager.datafactory.models.PipelineFolder; import com.azure.resourcemanager.datafactory.models.PipelinePolicy; import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -28,31 +28,26 @@ public final class PipelineResourceInner extends SubResource { /* * Properties of the pipeline. 
*/ - @JsonProperty(value = "properties", required = true) private Pipeline innerProperties = new Pipeline(); /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /* * Pipeline resource type. */ - @JsonIgnore private Map additionalProperties; /** @@ -102,7 +97,6 @@ public String etag() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -118,14 +112,6 @@ public PipelineResourceInner withAdditionalProperties(Map additi return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * {@inheritDoc} */ @@ -360,4 +346,61 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PipelineResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.innerProperties); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of PipelineResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PipelineResourceInner. + */ + public static PipelineResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineResourceInner deserializedPipelineResourceInner = new PipelineResourceInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedPipelineResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedPipelineResourceInner.innerProperties = Pipeline.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedPipelineResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedPipelineResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedPipelineResourceInner.etag = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPipelineResourceInner.additionalProperties = additionalProperties; + + return deserializedPipelineResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunInner.java index d6e8e5080da5..f1a71726fd36 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunInner.java @@ -5,12 +5,13 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PipelineRunInvokedBy; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.Map; @@ -19,91 +20,75 @@ * Information about a pipeline run. */ @Fluent -public final class PipelineRunInner { +public final class PipelineRunInner implements JsonSerializable { /* * Identifier of a run. */ - @JsonProperty(value = "runId", access = JsonProperty.Access.WRITE_ONLY) private String runId; /* * Identifier that correlates all the recovery runs of a pipeline run. */ - @JsonProperty(value = "runGroupId", access = JsonProperty.Access.WRITE_ONLY) private String runGroupId; /* * Indicates if the recovered pipeline run is the latest in its group. */ - @JsonProperty(value = "isLatest", access = JsonProperty.Access.WRITE_ONLY) private Boolean isLatest; /* * The pipeline name. */ - @JsonProperty(value = "pipelineName", access = JsonProperty.Access.WRITE_ONLY) private String pipelineName; /* * The full or partial list of parameter name, value pair used in the pipeline run. 
*/ - @JsonProperty(value = "parameters", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * Run dimensions emitted by Pipeline run. */ - @JsonProperty(value = "runDimensions", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map runDimensions; /* * Entity that started the pipeline run. */ - @JsonProperty(value = "invokedBy", access = JsonProperty.Access.WRITE_ONLY) private PipelineRunInvokedBy invokedBy; /* * The last updated timestamp for the pipeline run event in ISO8601 format. */ - @JsonProperty(value = "lastUpdated", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastUpdated; /* * The start time of a pipeline run in ISO8601 format. */ - @JsonProperty(value = "runStart", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime runStart; /* * The end time of a pipeline run in ISO8601 format. */ - @JsonProperty(value = "runEnd", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime runEnd; /* * The duration of a pipeline run. */ - @JsonProperty(value = "durationInMs", access = JsonProperty.Access.WRITE_ONLY) private Integer durationInMs; /* * The status of a pipeline run. Possible values: Queued, InProgress, Succeeded, Failed, Canceling, Cancelled */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private String status; /* * The message from a pipeline run. */ - @JsonProperty(value = "message", access = JsonProperty.Access.WRITE_ONLY) private String message; /* * Information about a pipeline run. */ - @JsonIgnore private Map additionalProperties; /** @@ -235,7 +220,6 @@ public String message() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -251,14 +235,6 @@ public PipelineRunInner withAdditionalProperties(Map additionalP return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -269,4 +245,79 @@ public void validate() { invokedBy().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineRunInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineRunInner if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelineRunInner. 
+ */ + public static PipelineRunInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineRunInner deserializedPipelineRunInner = new PipelineRunInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("runId".equals(fieldName)) { + deserializedPipelineRunInner.runId = reader.getString(); + } else if ("runGroupId".equals(fieldName)) { + deserializedPipelineRunInner.runGroupId = reader.getString(); + } else if ("isLatest".equals(fieldName)) { + deserializedPipelineRunInner.isLatest = reader.getNullable(JsonReader::getBoolean); + } else if ("pipelineName".equals(fieldName)) { + deserializedPipelineRunInner.pipelineName = reader.getString(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.getString()); + deserializedPipelineRunInner.parameters = parameters; + } else if ("runDimensions".equals(fieldName)) { + Map runDimensions = reader.readMap(reader1 -> reader1.getString()); + deserializedPipelineRunInner.runDimensions = runDimensions; + } else if ("invokedBy".equals(fieldName)) { + deserializedPipelineRunInner.invokedBy = PipelineRunInvokedBy.fromJson(reader); + } else if ("lastUpdated".equals(fieldName)) { + deserializedPipelineRunInner.lastUpdated = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("runStart".equals(fieldName)) { + deserializedPipelineRunInner.runStart = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("runEnd".equals(fieldName)) { + deserializedPipelineRunInner.runEnd = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("durationInMs".equals(fieldName)) { + deserializedPipelineRunInner.durationInMs = 
reader.getNullable(JsonReader::getInt); + } else if ("status".equals(fieldName)) { + deserializedPipelineRunInner.status = reader.getString(); + } else if ("message".equals(fieldName)) { + deserializedPipelineRunInner.message = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPipelineRunInner.additionalProperties = additionalProperties; + + return deserializedPipelineRunInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java index e072cc481be0..fd12f07854f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PipelineRunsQueryResponseInner.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A list pipeline runs. */ @Fluent -public final class PipelineRunsQueryResponseInner { +public final class PipelineRunsQueryResponseInner implements JsonSerializable { /* * List of pipeline runs. */ - @JsonProperty(value = "value", required = true) private List value; /* * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. 
*/ - @JsonProperty(value = "continuationToken") private String continuationToken; /** @@ -90,4 +92,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PipelineRunsQueryResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineRunsQueryResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineRunsQueryResponseInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PipelineRunsQueryResponseInner. 
+ */ + public static PipelineRunsQueryResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineRunsQueryResponseInner deserializedPipelineRunsQueryResponseInner + = new PipelineRunsQueryResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> PipelineRunInner.fromJson(reader1)); + deserializedPipelineRunsQueryResponseInner.value = value; + } else if ("continuationToken".equals(fieldName)) { + deserializedPipelineRunsQueryResponseInner.continuationToken = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineRunsQueryResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java index f8591a922682..2748a48ca0d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * PostgreSQL linked service properties. 
*/ @Fluent -public final class PostgreSqlLinkedServiceTypeProperties { +public final class PostgreSqlLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -120,4 +122,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PostgreSqlLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlLinkedServiceTypeProperties. 
+ */ + public static PostgreSqlLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlLinkedServiceTypeProperties deserializedPostgreSqlLinkedServiceTypeProperties + = new PostgreSqlLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedPostgreSqlLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedPostgreSqlLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedPostgreSqlLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPostgreSqlLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlTableDatasetTypeProperties.java index 700d0299cc23..c9584d57c8af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlTableDatasetTypeProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; 
+import java.io.IOException; /** * PostgreSQL table dataset properties. */ @Fluent -public final class PostgreSqlTableDatasetTypeProperties { +public final class PostgreSqlTableDatasetTypeProperties + implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The PostgreSQL table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The PostgreSQL schema name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +107,47 @@ public PostgreSqlTableDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PostgreSqlTableDatasetTypeProperties. 
+ */ + public static PostgreSqlTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlTableDatasetTypeProperties deserializedPostgreSqlTableDatasetTypeProperties + = new PostgreSqlTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedPostgreSqlTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedPostgreSqlTableDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedPostgreSqlTableDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedPostgreSqlTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java index 03eeaae9fc31..81bcfed63827 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2LinkedServiceTypeProperties.java @@ -6,131 +6,117 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * PostgreSqlV2 linked service properties. */ @Fluent -public final class PostgreSqlV2LinkedServiceTypeProperties { +public final class PostgreSqlV2LinkedServiceTypeProperties + implements JsonSerializable { /* * Server name for connection. Type: string. */ - @JsonProperty(value = "server", required = true) private Object server; /* * The port for the connection. Type: integer. */ - @JsonProperty(value = "port") private Object port; /* * Username for authentication. Type: string. */ - @JsonProperty(value = "username", required = true) private Object username; /* * Database name for connection. Type: string. */ - @JsonProperty(value = "database", required = true) private Object database; /* * SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. * Type: integer. */ - @JsonProperty(value = "sslMode", required = true) private Object sslMode; /* * Sets the schema search path. Type: string. */ - @JsonProperty(value = "schema") private Object schema; /* * Whether connection pooling should be used. Type: boolean. */ - @JsonProperty(value = "pooling") private Object pooling; /* * The time to wait (in seconds) while trying to establish a connection before terminating the attempt and * generating an error. Type: integer. */ - @JsonProperty(value = "connectionTimeout") private Object connectionTimeout; /* * The time to wait (in seconds) while trying to execute a command before terminating the attempt and generating an * error. Set to zero for infinity. Type: integer. */ - @JsonProperty(value = "commandTimeout") private Object commandTimeout; /* * Whether to trust the server certificate without validating it. Type: boolean. */ - @JsonProperty(value = "trustServerCertificate") private Object trustServerCertificate; /* * Location of a client certificate to be sent to the server. Type: string. 
*/ - @JsonProperty(value = "sslCertificate") private Object sslCertificate; /* * Location of a client key for a client certificate to be sent to the server. Type: string. */ - @JsonProperty(value = "sslKey") private Object sslKey; /* * Password for a key for a client certificate. Type: string. */ - @JsonProperty(value = "sslPassword") private Object sslPassword; /* * Determines the size of the internal buffer uses when reading. Increasing may improve performance if transferring * large values from the database. Type: integer. */ - @JsonProperty(value = "readBufferSize") private Object readBufferSize; /* * When enabled, parameter values are logged when commands are executed. Type: boolean. */ - @JsonProperty(value = "logParameters") private Object logParameters; /* * Gets or sets the session timezone. Type: string. */ - @JsonProperty(value = "timezone") private Object timezone; /* * Gets or sets the .NET encoding that will be used to encode/decode PostgreSQL string data. Type: string */ - @JsonProperty(value = "encoding") private Object encoding; /* * The Azure key vault secret reference of password in connection string. Type: string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -569,4 +555,97 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PostgreSqlV2LinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("sslMode", this.sslMode); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("pooling", this.pooling); + jsonWriter.writeUntypedField("connectionTimeout", this.connectionTimeout); + jsonWriter.writeUntypedField("commandTimeout", this.commandTimeout); + jsonWriter.writeUntypedField("trustServerCertificate", this.trustServerCertificate); + jsonWriter.writeUntypedField("sslCertificate", this.sslCertificate); + jsonWriter.writeUntypedField("sslKey", this.sslKey); + jsonWriter.writeUntypedField("sslPassword", this.sslPassword); + jsonWriter.writeUntypedField("readBufferSize", this.readBufferSize); + jsonWriter.writeUntypedField("logParameters", this.logParameters); + jsonWriter.writeUntypedField("timezone", this.timezone); + jsonWriter.writeUntypedField("encoding", this.encoding); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlV2LinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlV2LinkedServiceTypeProperties. + */ + public static PostgreSqlV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlV2LinkedServiceTypeProperties deserializedPostgreSqlV2LinkedServiceTypeProperties + = new PostgreSqlV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("sslMode".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.sslMode = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.schema = reader.readUntyped(); + } else if ("pooling".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.pooling = reader.readUntyped(); + } else if ("connectionTimeout".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.connectionTimeout = reader.readUntyped(); + } else if ("commandTimeout".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.commandTimeout = reader.readUntyped(); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.trustServerCertificate = reader.readUntyped(); + } else if 
("sslCertificate".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.sslCertificate = reader.readUntyped(); + } else if ("sslKey".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.sslKey = reader.readUntyped(); + } else if ("sslPassword".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.sslPassword = reader.readUntyped(); + } else if ("readBufferSize".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.readBufferSize = reader.readUntyped(); + } else if ("logParameters".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.logParameters = reader.readUntyped(); + } else if ("timezone".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.timezone = reader.readUntyped(); + } else if ("encoding".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.encoding = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedPostgreSqlV2LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPostgreSqlV2LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2TableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2TableDatasetTypeProperties.java index 7763656aaf50..4a245fc468c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2TableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PostgreSqlV2TableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * PostgreSQLV2 table dataset properties. */ @Fluent -public final class PostgreSqlV2TableDatasetTypeProperties { +public final class PostgreSqlV2TableDatasetTypeProperties + implements JsonSerializable { /* * The PostgreSQL table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The PostgreSQL schema name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -77,4 +80,44 @@ public PostgreSqlV2TableDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlV2TableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlV2TableDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PostgreSqlV2TableDatasetTypeProperties. 
+ */ + public static PostgreSqlV2TableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlV2TableDatasetTypeProperties deserializedPostgreSqlV2TableDatasetTypeProperties + = new PostgreSqlV2TableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("table".equals(fieldName)) { + deserializedPostgreSqlV2TableDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedPostgreSqlV2TableDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedPostgreSqlV2TableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PowerQueryTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PowerQueryTypeProperties.java index 9627cb2e6660..cf06e59e45a4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PowerQueryTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PowerQueryTypeProperties.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PowerQuerySource; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Power Query data flow type properties. 
*/ @Fluent -public final class PowerQueryTypeProperties { +public final class PowerQueryTypeProperties implements JsonSerializable { /* * List of sources in Power Query. */ - @JsonProperty(value = "sources") private List sources; /* * Power query mashup script. */ - @JsonProperty(value = "script") private String script; /* * Locale of the Power query mashup document. */ - @JsonProperty(value = "documentLocale") private String documentLocale; /** @@ -108,4 +109,47 @@ public void validate() { sources().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sources", this.sources, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("script", this.script); + jsonWriter.writeStringField("documentLocale", this.documentLocale); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PowerQueryTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PowerQueryTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PowerQueryTypeProperties. 
+ */ + public static PowerQueryTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PowerQueryTypeProperties deserializedPowerQueryTypeProperties = new PowerQueryTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sources".equals(fieldName)) { + List sources = reader.readArray(reader1 -> PowerQuerySource.fromJson(reader1)); + deserializedPowerQueryTypeProperties.sources = sources; + } else if ("script".equals(fieldName)) { + deserializedPowerQueryTypeProperties.script = reader.getString(); + } else if ("documentLocale".equals(fieldName)) { + deserializedPowerQueryTypeProperties.documentLocale = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPowerQueryTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoDatasetTypeProperties.java index 9415b0eb249c..6cc4025ff6eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Presto Dataset Properties. 
*/ @Fluent -public final class PrestoDatasetTypeProperties { +public final class PrestoDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Presto. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Presto. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public PrestoDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrestoDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrestoDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrestoDatasetTypeProperties. 
+ */ + public static PrestoDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrestoDatasetTypeProperties deserializedPrestoDatasetTypeProperties = new PrestoDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedPrestoDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedPrestoDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedPrestoDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrestoDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java index cca0947a615d..131b1a2efdb5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrestoLinkedServiceTypeProperties.java @@ -6,61 +6,57 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PrestoAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * 
Presto server linked service properties. */ @Fluent -public final class PrestoLinkedServiceTypeProperties { +public final class PrestoLinkedServiceTypeProperties implements JsonSerializable { /* * The IP address or host name of the Presto server. (i.e. 192.168.222.160) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The version of the Presto server. (i.e. 0.148-t) */ - @JsonProperty(value = "serverVersion", required = true) private Object serverVersion; /* * The catalog context for all request against the server. */ - @JsonProperty(value = "catalog", required = true) private Object catalog; /* * The TCP port that the Presto server uses to listen for client connections. The default value is 8080. */ - @JsonProperty(value = "port") private Object port; /* * The authentication mechanism used to connect to the Presto server. */ - @JsonProperty(value = "authenticationType", required = true) private PrestoAuthenticationType authenticationType; /* * The user name used to connect to the Presto server. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name. */ - @JsonProperty(value = "password") private SecretBase password; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -68,41 +64,35 @@ public final class PrestoLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. */ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false. 
*/ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. */ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone * Database. The default value is the system time zone. */ - @JsonProperty(value = "timeZoneID") private Object timeZoneId; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -441,4 +431,83 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PrestoLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("serverVersion", this.serverVersion); + jsonWriter.writeUntypedField("catalog", this.catalog); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeUntypedField("timeZoneID", this.timeZoneId); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrestoLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrestoLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PrestoLinkedServiceTypeProperties. 
+ */ + public static PrestoLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrestoLinkedServiceTypeProperties deserializedPrestoLinkedServiceTypeProperties + = new PrestoLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("serverVersion".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.serverVersion = reader.readUntyped(); + } else if ("catalog".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.catalog = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.authenticationType + = PrestoAuthenticationType.fromString(reader.getString()); + } else if ("port".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("enableSsl".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if ("allowHostNameCNMismatch".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + 
deserializedPrestoLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("timeZoneID".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.timeZoneId = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedPrestoLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrestoLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateEndpointConnectionResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateEndpointConnectionResourceInner.java index d92ad6787930..1095619c826a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateEndpointConnectionResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateEndpointConnectionResourceInner.java @@ -6,8 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.RemotePrivateEndpointConnection; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Private Endpoint Connection ARM resource. @@ -17,25 +20,21 @@ public final class PrivateEndpointConnectionResourceInner extends SubResource { /* * Core resource properties */ - @JsonProperty(value = "properties") private RemotePrivateEndpointConnection properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. 
*/ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -110,4 +109,51 @@ public void validate() { properties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateEndpointConnectionResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateEndpointConnectionResourceInner if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateEndpointConnectionResourceInner. 
+ */ + public static PrivateEndpointConnectionResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateEndpointConnectionResourceInner deserializedPrivateEndpointConnectionResourceInner + = new PrivateEndpointConnectionResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedPrivateEndpointConnectionResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedPrivateEndpointConnectionResourceInner.properties + = RemotePrivateEndpointConnection.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedPrivateEndpointConnectionResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedPrivateEndpointConnectionResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedPrivateEndpointConnectionResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateEndpointConnectionResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java index 8a00d9f0cf51..ad8c27f1ab33 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/PrivateLinkResourcesWrapperInner.java @@ -6,19 +6,22 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.PrivateLinkResource; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Wrapper for a collection of private link resources. */ @Fluent -public final class PrivateLinkResourcesWrapperInner { +public final class PrivateLinkResourcesWrapperInner implements JsonSerializable { /* * The value property. */ - @JsonProperty(value = "value", required = true) private List value; /** @@ -63,4 +66,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PrivateLinkResourcesWrapperInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkResourcesWrapperInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkResourcesWrapperInner if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PrivateLinkResourcesWrapperInner. 
+ */ + public static PrivateLinkResourcesWrapperInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkResourcesWrapperInner deserializedPrivateLinkResourcesWrapperInner + = new PrivateLinkResourcesWrapperInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> PrivateLinkResource.fromJson(reader1)); + deserializedPrivateLinkResourcesWrapperInner.value = value; + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkResourcesWrapperInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java index c26da7cb927e..737df0552e6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickBooksLinkedServiceTypeProperties.java @@ -5,68 +5,64 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * QuickBooks server linked service properties. 
*/ @Fluent -public final class QuickBooksLinkedServiceTypeProperties { +public final class QuickBooksLinkedServiceTypeProperties + implements JsonSerializable { /* * Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked * service. Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */ - @JsonProperty(value = "endpoint") private Object endpoint; /* * The company ID of the QuickBooks company to authorize. */ - @JsonProperty(value = "companyId") private Object companyId; /* * The consumer key for OAuth 1.0 authentication. */ - @JsonProperty(value = "consumerKey") private Object consumerKey; /* * The consumer secret for OAuth 1.0 authentication. */ - @JsonProperty(value = "consumerSecret") private SecretBase consumerSecret; /* * The access token for OAuth 1.0 authentication. */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * The access token secret for OAuth 1.0 authentication. */ - @JsonProperty(value = "accessTokenSecret") private SecretBase accessTokenSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -277,4 +273,65 @@ public void validate() { accessTokenSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("companyId", this.companyId); + jsonWriter.writeUntypedField("consumerKey", this.consumerKey); + jsonWriter.writeJsonField("consumerSecret", this.consumerSecret); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeJsonField("accessTokenSecret", this.accessTokenSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickBooksLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QuickBooksLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the QuickBooksLinkedServiceTypeProperties. 
+ */ + public static QuickBooksLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickBooksLinkedServiceTypeProperties deserializedQuickBooksLinkedServiceTypeProperties + = new QuickBooksLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("endpoint".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("companyId".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.companyId = reader.readUntyped(); + } else if ("consumerKey".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.consumerKey = reader.readUntyped(); + } else if ("consumerSecret".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.consumerSecret = SecretBase.fromJson(reader); + } else if ("accessToken".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("accessTokenSecret".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.accessTokenSecret = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedQuickBooksLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedQuickBooksLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java index 617ad2fd9a7a..1bc57416d03b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/QuickbaseLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Quickbase linked service type properties. */ @Fluent -public final class QuickbaseLinkedServiceTypeProperties { +public final class QuickbaseLinkedServiceTypeProperties + implements JsonSerializable { /* * The url to connect Quickbase source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The user token for the Quickbase source. */ - @JsonProperty(value = "userToken", required = true) private SecretBase userToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -122,4 +124,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(QuickbaseLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeJsonField("userToken", this.userToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickbaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QuickbaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the QuickbaseLinkedServiceTypeProperties. 
+ */ + public static QuickbaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickbaseLinkedServiceTypeProperties deserializedQuickbaseLinkedServiceTypeProperties + = new QuickbaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedQuickbaseLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("userToken".equals(fieldName)) { + deserializedQuickbaseLinkedServiceTypeProperties.userToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedQuickbaseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedQuickbaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RelationalTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RelationalTableDatasetTypeProperties.java index c5f7463c3fa7..0a61b20a4165 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RelationalTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RelationalTableDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Relational table 
dataset properties. */ @Fluent -public final class RelationalTableDatasetTypeProperties { +public final class RelationalTableDatasetTypeProperties + implements JsonSerializable { /* * The relational table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +55,41 @@ public RelationalTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RelationalTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RelationalTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the RelationalTableDatasetTypeProperties. 
+ */ + public static RelationalTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RelationalTableDatasetTypeProperties deserializedRelationalTableDatasetTypeProperties + = new RelationalTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedRelationalTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedRelationalTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java index d9b7edbdfc13..e2810890937d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RerunTumblingWindowTriggerTypeProperties.java @@ -5,37 +5,40 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; /** * Rerun Trigger properties. 
*/ @Fluent -public final class RerunTumblingWindowTriggerTypeProperties { +public final class RerunTumblingWindowTriggerTypeProperties + implements JsonSerializable { /* * The parent trigger reference. */ - @JsonProperty(value = "parentTrigger", required = true) private Object parentTrigger; /* * The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */ - @JsonProperty(value = "requestedStartTime", required = true) private OffsetDateTime requestedStartTime; /* * The end time for the time period for which restatement is initiated. Only UTC time is currently supported. */ - @JsonProperty(value = "requestedEndTime", required = true) private OffsetDateTime requestedEndTime; /* * The max number of parallel time windows (ready for execution) for which a rerun is triggered. */ - @JsonProperty(value = "rerunConcurrency", required = true) private int rerunConcurrency; /** @@ -154,4 +157,59 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RerunTumblingWindowTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("parentTrigger", this.parentTrigger); + jsonWriter.writeStringField("requestedStartTime", + this.requestedStartTime == null + ? null + : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.requestedStartTime)); + jsonWriter.writeStringField("requestedEndTime", + this.requestedEndTime == null + ? null + : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.requestedEndTime)); + jsonWriter.writeIntField("rerunConcurrency", this.rerunConcurrency); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RerunTumblingWindowTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of RerunTumblingWindowTriggerTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RerunTumblingWindowTriggerTypeProperties. + */ + public static RerunTumblingWindowTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RerunTumblingWindowTriggerTypeProperties deserializedRerunTumblingWindowTriggerTypeProperties + = new RerunTumblingWindowTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("parentTrigger".equals(fieldName)) { + deserializedRerunTumblingWindowTriggerTypeProperties.parentTrigger = reader.readUntyped(); + } else if ("requestedStartTime".equals(fieldName)) { + deserializedRerunTumblingWindowTriggerTypeProperties.requestedStartTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("requestedEndTime".equals(fieldName)) { + deserializedRerunTumblingWindowTriggerTypeProperties.requestedEndTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("rerunConcurrency".equals(fieldName)) { + deserializedRerunTumblingWindowTriggerTypeProperties.rerunConcurrency = reader.getInt(); + } else { + reader.skipChildren(); + } + } + + return deserializedRerunTumblingWindowTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java 
index fe2422597217..75c8d0f981d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ResponsysLinkedServiceTypeProperties.java @@ -6,58 +6,56 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Responsys linked service properties. */ @Fluent -public final class ResponsysLinkedServiceTypeProperties { +public final class ResponsysLinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of the Responsys server. */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The client ID associated with the Responsys application. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -242,4 +240,60 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ResponsysLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ResponsysLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ResponsysLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ResponsysLinkedServiceTypeProperties. 
+ */ + public static ResponsysLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ResponsysLinkedServiceTypeProperties deserializedResponsysLinkedServiceTypeProperties + = new ResponsysLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedResponsysLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedResponsysLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java index 8d160f931a7c..b3c2320357b3 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestResourceDatasetTypeProperties.java @@ -5,48 +5,44 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; /** * Properties specific to this dataset type. */ @Fluent -public final class RestResourceDatasetTypeProperties { +public final class RestResourceDatasetTypeProperties implements JsonSerializable { /* * The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "relativeUrl") private Object relativeUrl; /* * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestMethod") private Object requestMethod; /* * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestBody") private Object requestBody; /* * The additional HTTP headers in the request to the RESTful API. */ - @JsonProperty(value = "additionalHeaders") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalHeaders; /* * The pagination rules to compose next page requests. 
*/ - @JsonProperty(value = "paginationRules") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map paginationRules; /** @@ -168,4 +164,57 @@ public RestResourceDatasetTypeProperties withPaginationRules(Map */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("relativeUrl", this.relativeUrl); + jsonWriter.writeUntypedField("requestMethod", this.requestMethod); + jsonWriter.writeUntypedField("requestBody", this.requestBody); + jsonWriter.writeMapField("additionalHeaders", this.additionalHeaders, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("paginationRules", this.paginationRules, + (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestResourceDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the RestResourceDatasetTypeProperties. 
+ */ + public static RestResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestResourceDatasetTypeProperties deserializedRestResourceDatasetTypeProperties + = new RestResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("relativeUrl".equals(fieldName)) { + deserializedRestResourceDatasetTypeProperties.relativeUrl = reader.readUntyped(); + } else if ("requestMethod".equals(fieldName)) { + deserializedRestResourceDatasetTypeProperties.requestMethod = reader.readUntyped(); + } else if ("requestBody".equals(fieldName)) { + deserializedRestResourceDatasetTypeProperties.requestBody = reader.readUntyped(); + } else if ("additionalHeaders".equals(fieldName)) { + Map additionalHeaders = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedRestResourceDatasetTypeProperties.additionalHeaders = additionalHeaders; + } else if ("paginationRules".equals(fieldName)) { + Map paginationRules = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedRestResourceDatasetTypeProperties.paginationRules = paginationRules; + } else { + reader.skipChildren(); + } + } + + return deserializedRestResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java index 70acd497356b..a75ea985a322 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/RestServiceLinkedServiceTypeProperties.java @@ -6,72 +6,68 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.RestServiceAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Rest Service linked service properties. */ @Fluent -public final class RestServiceLinkedServiceTypeProperties { +public final class RestServiceLinkedServiceTypeProperties + implements JsonSerializable { /* * The base URL of the REST service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableServerCertificateValidation") private Object enableServerCertificateValidation; /* * Type of authentication used to connect to the REST service. */ - @JsonProperty(value = "authenticationType", required = true) private RestServiceAuthenticationType authenticationType; /* * The user name used in Basic authentication type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The password used in Basic authentication type. */ - @JsonProperty(value = "password") private SecretBase password; /* * The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression * with resultType object). 
*/ - @JsonProperty(value = "authHeaders") private Object authHeaders; /* * The application's client ID used in AadServicePrincipal authentication type. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The application's key used in AadServicePrincipal authentication type. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which * your application resides. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tenant") private Object tenant; /* @@ -79,59 +75,50 @@ public final class RestServiceLinkedServiceTypeProperties { * AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "azureCloudType") private Object azureCloudType; /* * The resource you are requesting authorization to use. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "aadResourceId") private Object aadResourceId; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * The client ID associated with your application. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret associated with your application. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The token endpoint of the authorization server to acquire access token. 
Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "tokenEndpoint") private Object tokenEndpoint; /* * The target service or resource to which the access will be requested. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "resource") private Object resource; /* * The scope of the access required. It describes what kind of access will be requested. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "scope") private Object scope; /** @@ -559,4 +546,98 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RestServiceLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("enableServerCertificateValidation", this.enableServerCertificateValidation); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("authHeaders", this.authHeaders); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeUntypedField("azureCloudType", this.azureCloudType); + jsonWriter.writeUntypedField("aadResourceId", this.aadResourceId); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("credential", this.credential); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("tokenEndpoint", this.tokenEndpoint); + 
jsonWriter.writeUntypedField("resource", this.resource); + jsonWriter.writeUntypedField("scope", this.scope); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestServiceLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestServiceLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RestServiceLinkedServiceTypeProperties. + */ + public static RestServiceLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestServiceLinkedServiceTypeProperties deserializedRestServiceLinkedServiceTypeProperties + = new RestServiceLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.authenticationType + = RestServiceAuthenticationType.fromString(reader.getString()); + } else if ("enableServerCertificateValidation".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.enableServerCertificateValidation + = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("authHeaders".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.authHeaders = reader.readUntyped(); + } else if 
("servicePrincipalId".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("azureCloudType".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.azureCloudType = reader.readUntyped(); + } else if ("aadResourceId".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.aadResourceId = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.credential + = CredentialReference.fromJson(reader); + } else if ("clientId".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("tokenEndpoint".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.tokenEndpoint = reader.readUntyped(); + } else if ("resource".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.resource = reader.readUntyped(); + } else if ("scope".equals(fieldName)) { + deserializedRestServiceLinkedServiceTypeProperties.scope = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedRestServiceLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java index 31a8dc19baa5..f9bb4fa07664 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceLinkedServiceTypeProperties.java @@ -5,52 +5,51 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Salesforce linked service properties. */ @Fluent -public final class SalesforceLinkedServiceTypeProperties { +public final class SalesforceLinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify * 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, * 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "username") private Object username; /* * The password for Basic authentication of the Salesforce instance. */ - @JsonProperty(value = "password") private SecretBase password; /* * The security token is optional to remotely access Salesforce instance. 
*/ - @JsonProperty(value = "securityToken") private SecretBase securityToken; /* * The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "apiVersion") private Object apiVersion; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -202,4 +201,56 @@ public void validate() { securityToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("environmentUrl", this.environmentUrl); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("securityToken", this.securityToken); + jsonWriter.writeUntypedField("apiVersion", this.apiVersion); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceLinkedServiceTypeProperties. 
+ */ + public static SalesforceLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceLinkedServiceTypeProperties deserializedSalesforceLinkedServiceTypeProperties + = new SalesforceLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("environmentUrl".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.environmentUrl = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("securityToken".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.securityToken = SecretBase.fromJson(reader); + } else if ("apiVersion".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.apiVersion = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSalesforceLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java index 42957490bf76..292163ee7b12 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceMarketingCloudLinkedServiceTypeProperties.java @@ -5,61 +5,59 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Salesforce Marketing Cloud linked service properties. */ @Fluent -public final class SalesforceMarketingCloudLinkedServiceTypeProperties { +public final class SalesforceMarketingCloudLinkedServiceTypeProperties + implements JsonSerializable { /* * Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in * the linked service. Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean * (or Expression with resultType boolean). */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -234,4 +232,66 @@ public void validate() { clientSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceMarketingCloudLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceMarketingCloudLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceMarketingCloudLinkedServiceTypeProperties. 
+ */ + public static SalesforceMarketingCloudLinkedServiceTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + SalesforceMarketingCloudLinkedServiceTypeProperties deserializedSalesforceMarketingCloudLinkedServiceTypeProperties + = new SalesforceMarketingCloudLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.connectionProperties + = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.clientSecret + = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.useEncryptedEndpoints + = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.useHostVerification + = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.usePeerVerification + = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceMarketingCloudLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceObjectDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceObjectDatasetTypeProperties.java index 5f32ea81f22f..df8bfbe15c60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceObjectDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceObjectDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Salesforce object dataset properties. */ @Fluent -public final class SalesforceObjectDatasetTypeProperties { +public final class SalesforceObjectDatasetTypeProperties + implements JsonSerializable { /* * The Salesforce object API name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "objectApiName") private Object objectApiName; /** @@ -53,4 +57,41 @@ public SalesforceObjectDatasetTypeProperties withObjectApiName(Object objectApiN */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("objectApiName", this.objectApiName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceObjectDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceObjectDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SalesforceObjectDatasetTypeProperties. + */ + public static SalesforceObjectDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceObjectDatasetTypeProperties deserializedSalesforceObjectDatasetTypeProperties + = new SalesforceObjectDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("objectApiName".equals(fieldName)) { + deserializedSalesforceObjectDatasetTypeProperties.objectApiName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceObjectDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java index abff4b1c1971..f3268f90ff84 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudLinkedServiceTypeProperties.java @@ -5,58 +5,56 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Salesforce Service Cloud linked service properties. 
*/ @Fluent -public final class SalesforceServiceCloudLinkedServiceTypeProperties { +public final class SalesforceServiceCloudLinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from * sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, * 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "username") private Object username; /* * The password for Basic authentication of the Salesforce instance. */ - @JsonProperty(value = "password") private SecretBase password; /* * The security token is optional to remotely access Salesforce instance. */ - @JsonProperty(value = "securityToken") private SecretBase securityToken; /* * The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "apiVersion") private Object apiVersion; /* * Extended properties appended to the connection string. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "extendedProperties") private Object extendedProperties; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -232,4 +230,63 @@ public void validate() { securityToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("environmentUrl", this.environmentUrl); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("securityToken", this.securityToken); + jsonWriter.writeUntypedField("apiVersion", this.apiVersion); + jsonWriter.writeUntypedField("extendedProperties", this.extendedProperties); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudLinkedServiceTypeProperties. 
+ */ + public static SalesforceServiceCloudLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudLinkedServiceTypeProperties deserializedSalesforceServiceCloudLinkedServiceTypeProperties + = new SalesforceServiceCloudLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("environmentUrl".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.environmentUrl = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.password + = SecretBase.fromJson(reader); + } else if ("securityToken".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.securityToken + = SecretBase.fromJson(reader); + } else if ("apiVersion".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.apiVersion = reader.readUntyped(); + } else if ("extendedProperties".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.extendedProperties + = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceServiceCloudLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudObjectDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudObjectDatasetTypeProperties.java index c1c2af5ef39c..d76a8483e60f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudObjectDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudObjectDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Salesforce Service Cloud object dataset properties. */ @Fluent -public final class SalesforceServiceCloudObjectDatasetTypeProperties { +public final class SalesforceServiceCloudObjectDatasetTypeProperties + implements JsonSerializable { /* * The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "objectApiName") private Object objectApiName; /** @@ -53,4 +57,41 @@ public SalesforceServiceCloudObjectDatasetTypeProperties withObjectApiName(Objec */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("objectApiName", this.objectApiName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudObjectDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SalesforceServiceCloudObjectDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudObjectDatasetTypeProperties. + */ + public static SalesforceServiceCloudObjectDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudObjectDatasetTypeProperties deserializedSalesforceServiceCloudObjectDatasetTypeProperties + = new SalesforceServiceCloudObjectDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("objectApiName".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDatasetTypeProperties.objectApiName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceServiceCloudObjectDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java index af5a2c169aff..b367a1f5bb54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2LinkedServiceTypeProperties.java @@ -5,53 +5,52 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Salesforce Service Cloud V2 linked service properties. */ @Fluent -public final class SalesforceServiceCloudV2LinkedServiceTypeProperties { +public final class SalesforceServiceCloudV2LinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of Salesforce Service Cloud instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* * The authentication type to be used to connect to the Salesforce. Currently, we only support * OAuth2ClientCredentials, it is also the default value */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by * Salesforce BULK API 2.0. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "apiVersion") private Object apiVersion; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -202,4 +201,61 @@ public void validate() { clientSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("environmentUrl", this.environmentUrl); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("apiVersion", this.apiVersion); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudV2LinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2LinkedServiceTypeProperties. 
+ */ + public static SalesforceServiceCloudV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2LinkedServiceTypeProperties deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties + = new SalesforceServiceCloudV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("environmentUrl".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.environmentUrl + = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.authenticationType + = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.clientSecret + = SecretBase.fromJson(reader); + } else if ("apiVersion".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.apiVersion = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceServiceCloudV2LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java index c2e07084f020..38606372f3d8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceServiceCloudV2ObjectDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Salesforce Service Cloud V2 object dataset properties. */ @Fluent -public final class SalesforceServiceCloudV2ObjectDatasetTypeProperties { +public final class SalesforceServiceCloudV2ObjectDatasetTypeProperties + implements JsonSerializable { /* * The Salesforce Service Cloud V2 object API name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "objectApiName") private Object objectApiName; /* * The Salesforce Service Cloud V2 reportId. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "reportId") private Object reportId; /** @@ -81,4 +84,46 @@ public SalesforceServiceCloudV2ObjectDatasetTypeProperties withReportId(Object r */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("objectApiName", this.objectApiName); + jsonWriter.writeUntypedField("reportId", this.reportId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2ObjectDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SalesforceServiceCloudV2ObjectDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2ObjectDatasetTypeProperties. + */ + public static SalesforceServiceCloudV2ObjectDatasetTypeProperties fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2ObjectDatasetTypeProperties deserializedSalesforceServiceCloudV2ObjectDatasetTypeProperties + = new SalesforceServiceCloudV2ObjectDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("objectApiName".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDatasetTypeProperties.objectApiName + = reader.readUntyped(); + } else if ("reportId".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDatasetTypeProperties.reportId = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceServiceCloudV2ObjectDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java index b9d6f07a2cd2..d8914eae1c83 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2LinkedServiceTypeProperties.java @@ -5,53 +5,52 @@ package com.azure.resourcemanager.datafactory.fluent.models; import 
com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Salesforce V2 linked service properties. */ @Fluent -public final class SalesforceV2LinkedServiceTypeProperties { +public final class SalesforceV2LinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of Salesforce instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "environmentUrl") private Object environmentUrl; /* * The authentication type to be used to connect to the Salesforce. Currently, we only support * OAuth2ClientCredentials, it is also the default value */ - @JsonProperty(value = "authenticationType") private Object authenticationType; /* * The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by * Salesforce BULK API 2.0. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "apiVersion") private Object apiVersion; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -202,4 +201,56 @@ public void validate() { clientSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("environmentUrl", this.environmentUrl); + jsonWriter.writeUntypedField("authenticationType", this.authenticationType); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("apiVersion", this.apiVersion); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceV2LinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceV2LinkedServiceTypeProperties. 
+ */ + public static SalesforceV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2LinkedServiceTypeProperties deserializedSalesforceV2LinkedServiceTypeProperties + = new SalesforceV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("environmentUrl".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.environmentUrl = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.authenticationType = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("apiVersion".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.apiVersion = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSalesforceV2LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceV2LinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2ObjectDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2ObjectDatasetTypeProperties.java index 6eb129e288d7..421cdc77b339 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2ObjectDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SalesforceV2ObjectDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Salesforce V2 object dataset properties. */ @Fluent -public final class SalesforceV2ObjectDatasetTypeProperties { +public final class SalesforceV2ObjectDatasetTypeProperties + implements JsonSerializable { /* * The Salesforce V2 object API name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "objectApiName") private Object objectApiName; /* * The Salesforce V2 report Id. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "reportId") private Object reportId; /** @@ -79,4 +82,44 @@ public SalesforceV2ObjectDatasetTypeProperties withReportId(Object reportId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("objectApiName", this.objectApiName); + jsonWriter.writeUntypedField("reportId", this.reportId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2ObjectDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceV2ObjectDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceV2ObjectDatasetTypeProperties. 
+ */ + public static SalesforceV2ObjectDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2ObjectDatasetTypeProperties deserializedSalesforceV2ObjectDatasetTypeProperties + = new SalesforceV2ObjectDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("objectApiName".equals(fieldName)) { + deserializedSalesforceV2ObjectDatasetTypeProperties.objectApiName = reader.readUntyped(); + } else if ("reportId".equals(fieldName)) { + deserializedSalesforceV2ObjectDatasetTypeProperties.reportId = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSalesforceV2ObjectDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java index ee1dfd11e5ae..cc3225c18490 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapBWLinkedServiceTypeProperties.java @@ -6,51 +6,49 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. 
*/ @Fluent -public final class SapBWLinkedServiceTypeProperties { +public final class SapBWLinkedServiceTypeProperties implements JsonSerializable { /* * Host name of the SAP BW instance. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server", required = true) private Object server; /* * System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "systemNumber", required = true) private Object systemNumber; /* * Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "clientId", required = true) private Object clientId; /* * Username to access the SAP BW server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to access the SAP BW server. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -214,4 +212,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapBWLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("systemNumber", this.systemNumber); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapBWLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapBWLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapBWLinkedServiceTypeProperties. 
+ */ + public static SapBWLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapBWLinkedServiceTypeProperties deserializedSapBWLinkedServiceTypeProperties + = new SapBWLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("systemNumber".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.systemNumber = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapBWLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapBWLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java index 63e3639d360a..01d99f7b6da3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerLinkedServiceTypeProperties.java @@ -6,38 +6,39 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SAP Cloud for Customer linked service properties. */ @Fluent -public final class SapCloudForCustomerLinkedServiceTypeProperties { +public final class SapCloudForCustomerLinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of SAP Cloud for Customer OData API. For example, * '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The username for Basic authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * The password for Basic authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Either encryptedCredential or username/password must be provided. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -151,4 +152,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapCloudForCustomerLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapCloudForCustomerLinkedServiceTypeProperties. 
+ */ + public static SapCloudForCustomerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerLinkedServiceTypeProperties deserializedSapCloudForCustomerLinkedServiceTypeProperties + = new SapCloudForCustomerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapCloudForCustomerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java index 51544a13e578..2b38adc22a4f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapCloudForCustomerResourceDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sap Cloud For Customer OData resource dataset properties. */ @Fluent -public final class SapCloudForCustomerResourceDatasetTypeProperties { +public final class SapCloudForCustomerResourceDatasetTypeProperties + implements JsonSerializable { /* * The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "path", required = true) private Object path; /** @@ -61,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapCloudForCustomerResourceDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerResourceDatasetTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapCloudForCustomerResourceDatasetTypeProperties. 
+ */ + public static SapCloudForCustomerResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerResourceDatasetTypeProperties deserializedSapCloudForCustomerResourceDatasetTypeProperties + = new SapCloudForCustomerResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("path".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDatasetTypeProperties.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapCloudForCustomerResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java index 6141806a2959..a3be9f729487 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccLinkedServiceTypeProperties.java @@ -6,38 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SAP ECC linked service properties. 
*/ @Fluent -public final class SapEccLinkedServiceTypeProperties { +public final class SapEccLinkedServiceTypeProperties implements JsonSerializable { /* * The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The username for Basic authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * The password for Basic authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Either encryptedCredential or username/password must be provided. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -151,4 +151,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapEccLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapEccLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapEccLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the SapEccLinkedServiceTypeProperties. + */ + public static SapEccLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapEccLinkedServiceTypeProperties deserializedSapEccLinkedServiceTypeProperties + = new SapEccLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedSapEccLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("username".equals(fieldName)) { + deserializedSapEccLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapEccLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapEccLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapEccLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java index ab07d8c8d4da..aa92e6784cd5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapEccResourceDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sap ECC OData resource dataset properties. */ @Fluent -public final class SapEccResourceDatasetTypeProperties { +public final class SapEccResourceDatasetTypeProperties + implements JsonSerializable { /* * The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "path", required = true) private Object path; /** @@ -59,4 +63,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapEccResourceDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapEccResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapEccResourceDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapEccResourceDatasetTypeProperties. 
+ */ + public static SapEccResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapEccResourceDatasetTypeProperties deserializedSapEccResourceDatasetTypeProperties + = new SapEccResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("path".equals(fieldName)) { + deserializedSapEccResourceDatasetTypeProperties.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapEccResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java index a21a35f61710..0e900f8a2525 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaLinkedServiceProperties.java @@ -5,50 +5,48 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SapHanaAuthenticationType; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. 
*/ @Fluent -public final class SapHanaLinkedServiceProperties { +public final class SapHanaLinkedServiceProperties implements JsonSerializable { /* * SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * Host name of the SAP HANA server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * The authentication type to be used to connect to the SAP HANA server. */ - @JsonProperty(value = "authenticationType") private SapHanaAuthenticationType authenticationType; /* * Username to access the SAP HANA server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to access the SAP HANA server. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -193,4 +191,58 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaLinkedServiceProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of SapHanaLinkedServiceProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapHanaLinkedServiceProperties. + */ + public static SapHanaLinkedServiceProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaLinkedServiceProperties deserializedSapHanaLinkedServiceProperties + = new SapHanaLinkedServiceProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.connectionString = reader.readUntyped(); + } else if ("server".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.server = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.authenticationType + = SapHanaAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapHanaLinkedServiceProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapHanaLinkedServiceProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaTableDatasetTypeProperties.java index 4e92b068d3dc..e05056614d4d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapHanaTableDatasetTypeProperties.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SAP HANA Table properties. */ @Fluent -public final class SapHanaTableDatasetTypeProperties { +public final class SapHanaTableDatasetTypeProperties implements JsonSerializable { /* * The schema name of SAP HANA. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of SAP HANA. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -77,4 +79,44 @@ public SapHanaTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapHanaTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapHanaTableDatasetTypeProperties. 
+ */ + public static SapHanaTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaTableDatasetTypeProperties deserializedSapHanaTableDatasetTypeProperties + = new SapHanaTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedSapHanaTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedSapHanaTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapHanaTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java index 1e2839f38633..8f8c48ff6af3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpLinkedServiceTypeProperties.java @@ -5,130 +5,116 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. 
*/ @Fluent -public final class SapOdpLinkedServiceTypeProperties { +public final class SapOdpLinkedServiceTypeProperties implements JsonSerializable { /* * Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a * string.) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemNumber") private Object systemNumber; /* * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number * represented as a string) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "language") private Object language; /* * SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemId") private Object systemId; /* * Username to access the SAP server where the table is located. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to access the SAP server where the table is located. */ - @JsonProperty(value = "password") private SecretBase password; /* * The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "messageServer") private Object messageServer; /* * The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "messageServerService") private Object messageServerService; /* * SNC activation indicator to access the SAP server where the table is located. 
Must be either 0 (off) or 1 (on). * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sncMode") private Object sncMode; /* * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "sncMyName") private Object sncMyName; /* * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "sncPartnerName") private Object sncPartnerName; /* * External security product's library to access the SAP server where the table is located. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "sncLibraryPath") private Object sncLibraryPath; /* * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "sncQop") private Object sncQop; /* * SNC X509 certificate file path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "x509CertificatePath") private Object x509CertificatePath; /* * The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logonGroup") private Object logonGroup; /* * The subscriber name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "subscriberName") private Object subscriberName; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -539,4 +525,92 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("systemNumber", this.systemNumber); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeUntypedField("language", this.language); + jsonWriter.writeUntypedField("systemId", this.systemId); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("messageServer", this.messageServer); + jsonWriter.writeUntypedField("messageServerService", this.messageServerService); + jsonWriter.writeUntypedField("sncMode", this.sncMode); + jsonWriter.writeUntypedField("sncMyName", this.sncMyName); + jsonWriter.writeUntypedField("sncPartnerName", this.sncPartnerName); + jsonWriter.writeUntypedField("sncLibraryPath", this.sncLibraryPath); + jsonWriter.writeUntypedField("sncQop", this.sncQop); + jsonWriter.writeUntypedField("x509CertificatePath", this.x509CertificatePath); + jsonWriter.writeUntypedField("logonGroup", this.logonGroup); + jsonWriter.writeUntypedField("subscriberName", this.subscriberName); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOdpLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOdpLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapOdpLinkedServiceTypeProperties. 
+ */ + public static SapOdpLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOdpLinkedServiceTypeProperties deserializedSapOdpLinkedServiceTypeProperties + = new SapOdpLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("systemNumber".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.systemNumber = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("language".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.language = reader.readUntyped(); + } else if ("systemId".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.systemId = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("messageServer".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.messageServer = reader.readUntyped(); + } else if ("messageServerService".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.messageServerService = reader.readUntyped(); + } else if ("sncMode".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.sncMode = reader.readUntyped(); + } else if ("sncMyName".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.sncMyName = reader.readUntyped(); + } else if ("sncPartnerName".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.sncPartnerName = reader.readUntyped(); + } else if 
("sncLibraryPath".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.sncLibraryPath = reader.readUntyped(); + } else if ("sncQop".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.sncQop = reader.readUntyped(); + } else if ("x509CertificatePath".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.x509CertificatePath = reader.readUntyped(); + } else if ("logonGroup".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.logonGroup = reader.readUntyped(); + } else if ("subscriberName".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.subscriberName = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapOdpLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapOdpLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java index ae80989598a6..e9b1bde48c8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOdpResourceDatasetTypeProperties.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SAP ODP Resource properties. 
*/ @Fluent -public final class SapOdpResourceDatasetTypeProperties { +public final class SapOdpResourceDatasetTypeProperties + implements JsonSerializable { /* * The context of the SAP ODP Object. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "context", required = true) private Object context; /* * The name of the SAP ODP Object. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "objectName", required = true) private Object objectName; /** @@ -90,4 +93,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOdpResourceDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("context", this.context); + jsonWriter.writeUntypedField("objectName", this.objectName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOdpResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOdpResourceDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOdpResourceDatasetTypeProperties. 
+ */ + public static SapOdpResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOdpResourceDatasetTypeProperties deserializedSapOdpResourceDatasetTypeProperties + = new SapOdpResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("context".equals(fieldName)) { + deserializedSapOdpResourceDatasetTypeProperties.context = reader.readUntyped(); + } else if ("objectName".equals(fieldName)) { + deserializedSapOdpResourceDatasetTypeProperties.objectName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapOdpResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java index f5f554d6b9ee..14b6dc56fa20 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubLinkedServiceTypeProperties.java @@ -5,84 +5,78 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to SAP Business Warehouse Open Hub Destination linked service type. 
*/ @Fluent -public final class SapOpenHubLinkedServiceTypeProperties { +public final class SapOpenHubLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "server") private Object server; /* * System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number * represented as a string.) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemNumber") private Object systemNumber; /* * Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit * decimal number represented as a string) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "language") private Object language; /* * SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemId") private Object systemId; /* * Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to access the SAP BW server where the open hub destination is located. */ - @JsonProperty(value = "password") private SecretBase password; /* * The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "messageServer") private Object messageServer; /* * The service name or port number of the Message Server. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "messageServerService") private Object messageServerService; /* * The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logonGroup") private Object logonGroup; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -343,4 +337,71 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("systemNumber", this.systemNumber); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeUntypedField("language", this.language); + jsonWriter.writeUntypedField("systemId", this.systemId); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("messageServer", this.messageServer); + jsonWriter.writeUntypedField("messageServerService", this.messageServerService); + jsonWriter.writeUntypedField("logonGroup", this.logonGroup); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOpenHubLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOpenHubLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapOpenHubLinkedServiceTypeProperties. 
+ */ + public static SapOpenHubLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOpenHubLinkedServiceTypeProperties deserializedSapOpenHubLinkedServiceTypeProperties + = new SapOpenHubLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("systemNumber".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.systemNumber = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("language".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.language = reader.readUntyped(); + } else if ("systemId".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.systemId = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("messageServer".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.messageServer = reader.readUntyped(); + } else if ("messageServerService".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.messageServerService = reader.readUntyped(); + } else if ("logonGroup".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.logonGroup = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapOpenHubLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return 
deserializedSapOpenHubLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java index 8f936ecf694b..693119e52b67 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapOpenHubTableDatasetTypeProperties.java @@ -6,32 +6,34 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sap Business Warehouse Open Hub Destination Table properties. */ @Fluent -public final class SapOpenHubTableDatasetTypeProperties { +public final class SapOpenHubTableDatasetTypeProperties + implements JsonSerializable { /* * The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "openHubDestinationName", required = true) private Object openHubDestinationName; /* * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "excludeLastRequest") private Object excludeLastRequest; /* * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this * property will be retrieved. The default value is 0. 
Type: integer (or Expression with resultType integer ). */ - @JsonProperty(value = "baseRequestId") private Object baseRequestId; /** @@ -122,4 +124,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOpenHubTableDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("openHubDestinationName", this.openHubDestinationName); + jsonWriter.writeUntypedField("excludeLastRequest", this.excludeLastRequest); + jsonWriter.writeUntypedField("baseRequestId", this.baseRequestId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOpenHubTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOpenHubTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOpenHubTableDatasetTypeProperties. 
+ */ + public static SapOpenHubTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOpenHubTableDatasetTypeProperties deserializedSapOpenHubTableDatasetTypeProperties + = new SapOpenHubTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("openHubDestinationName".equals(fieldName)) { + deserializedSapOpenHubTableDatasetTypeProperties.openHubDestinationName = reader.readUntyped(); + } else if ("excludeLastRequest".equals(fieldName)) { + deserializedSapOpenHubTableDatasetTypeProperties.excludeLastRequest = reader.readUntyped(); + } else if ("baseRequestId".equals(fieldName)) { + deserializedSapOpenHubTableDatasetTypeProperties.baseRequestId = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapOpenHubTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java index ffb76392d003..acbd0b1e401e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableLinkedServiceTypeProperties.java @@ -5,118 +5,107 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. */ @Fluent -public final class SapTableLinkedServiceTypeProperties { +public final class SapTableLinkedServiceTypeProperties + implements JsonSerializable { /* * Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a * string.) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemNumber") private Object systemNumber; /* * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number * represented as a string) Type: string (or Expression with resultType string). */ - @JsonProperty(value = "clientId") private Object clientId; /* * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "language") private Object language; /* * SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "systemId") private Object systemId; /* * Username to access the SAP server where the table is located. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to access the SAP server where the table is located. */ - @JsonProperty(value = "password") private SecretBase password; /* * The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "messageServer") private Object messageServer; /* * The service name or port number of the Message Server. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "messageServerService") private Object messageServerService; /* * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sncMode") private Object sncMode; /* * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "sncMyName") private Object sncMyName; /* * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "sncPartnerName") private Object sncPartnerName; /* * External security product's library to access the SAP server where the table is located. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "sncLibraryPath") private Object sncLibraryPath; /* * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "sncQop") private Object sncQop; /* * The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logonGroup") private Object logonGroup; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -485,4 +474,86 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("systemNumber", this.systemNumber); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeUntypedField("language", this.language); + jsonWriter.writeUntypedField("systemId", this.systemId); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("messageServer", this.messageServer); + jsonWriter.writeUntypedField("messageServerService", this.messageServerService); + jsonWriter.writeUntypedField("sncMode", this.sncMode); + jsonWriter.writeUntypedField("sncMyName", this.sncMyName); + jsonWriter.writeUntypedField("sncPartnerName", this.sncPartnerName); + jsonWriter.writeUntypedField("sncLibraryPath", this.sncLibraryPath); + jsonWriter.writeUntypedField("sncQop", this.sncQop); + jsonWriter.writeUntypedField("logonGroup", this.logonGroup); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTableLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTableLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapTableLinkedServiceTypeProperties. 
+ */ + public static SapTableLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTableLinkedServiceTypeProperties deserializedSapTableLinkedServiceTypeProperties + = new SapTableLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("systemNumber".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.systemNumber = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("language".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.language = reader.readUntyped(); + } else if ("systemId".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.systemId = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("messageServer".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.messageServer = reader.readUntyped(); + } else if ("messageServerService".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.messageServerService = reader.readUntyped(); + } else if ("sncMode".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.sncMode = reader.readUntyped(); + } else if ("sncMyName".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.sncMyName = reader.readUntyped(); + } else if ("sncPartnerName".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.sncPartnerName = reader.readUntyped(); + } 
else if ("sncLibraryPath".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.sncLibraryPath = reader.readUntyped(); + } else if ("sncQop".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.sncQop = reader.readUntyped(); + } else if ("logonGroup".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.logonGroup = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSapTableLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapTableLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java index 900cbdc0acc8..6261f05cab31 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SapTableResourceDatasetTypeProperties.java @@ -6,17 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SAP Table Resource properties. */ @Fluent -public final class SapTableResourceDatasetTypeProperties { +public final class SapTableResourceDatasetTypeProperties + implements JsonSerializable { /* * The name of the SAP Table. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "tableName", required = true) private Object tableName; /** @@ -59,4 +63,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapTableResourceDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTableResourceDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTableResourceDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapTableResourceDatasetTypeProperties. + */ + public static SapTableResourceDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTableResourceDatasetTypeProperties deserializedSapTableResourceDatasetTypeProperties + = new SapTableResourceDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedSapTableResourceDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapTableResourceDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java index 
ea90aabc7e03..b91ccee0a579 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScheduleTriggerTypeProperties.java @@ -6,18 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Schedule Trigger properties. */ @Fluent -public final class ScheduleTriggerTypeProperties { +public final class ScheduleTriggerTypeProperties implements JsonSerializable { /* * Recurrence schedule configuration. */ - @JsonProperty(value = "recurrence", required = true) private ScheduleTriggerRecurrence recurrence; /** @@ -62,4 +65,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScheduleTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("recurrence", this.recurrence); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScheduleTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScheduleTriggerTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScheduleTriggerTypeProperties. 
+ */ + public static ScheduleTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScheduleTriggerTypeProperties deserializedScheduleTriggerTypeProperties + = new ScheduleTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("recurrence".equals(fieldName)) { + deserializedScheduleTriggerTypeProperties.recurrence = ScheduleTriggerRecurrence.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedScheduleTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java index 57719cbc8ece..ae1b4c8dcd73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ScriptActivityTypeProperties.java @@ -5,33 +5,34 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Script activity properties. 
*/ @Fluent -public final class ScriptActivityTypeProperties { +public final class ScriptActivityTypeProperties implements JsonSerializable { /* * ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "scriptBlockExecutionTimeout") private Object scriptBlockExecutionTimeout; /* * Array of script blocks. Type: array. */ - @JsonProperty(value = "scripts") private List scripts; /* * Log settings of script activity. */ - @JsonProperty(value = "logSettings") private ScriptActivityTypePropertiesLogSettings logSettings; /** @@ -115,4 +116,49 @@ public void validate() { logSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("scriptBlockExecutionTimeout", this.scriptBlockExecutionTimeout); + jsonWriter.writeArrayField("scripts", this.scripts, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("logSettings", this.logSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScriptActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ScriptActivityTypeProperties. 
+ */ + public static ScriptActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptActivityTypeProperties deserializedScriptActivityTypeProperties = new ScriptActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("scriptBlockExecutionTimeout".equals(fieldName)) { + deserializedScriptActivityTypeProperties.scriptBlockExecutionTimeout = reader.readUntyped(); + } else if ("scripts".equals(fieldName)) { + List scripts + = reader.readArray(reader1 -> ScriptActivityScriptBlock.fromJson(reader1)); + deserializedScriptActivityTypeProperties.scripts = scripts; + } else if ("logSettings".equals(fieldName)) { + deserializedScriptActivityTypeProperties.logSettings + = ScriptActivityTypePropertiesLogSettings.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedScriptActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java index 9ead4e8bc6d0..b39b58cad10b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeNodeInner.java @@ -5,13 +5,14 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeUpdateResult; import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.Map; @@ -20,120 +21,101 @@ * Properties of Self-hosted integration runtime node. */ @Fluent -public final class SelfHostedIntegrationRuntimeNodeInner { +public final class SelfHostedIntegrationRuntimeNodeInner + implements JsonSerializable { /* * Name of the integration runtime node. */ - @JsonProperty(value = "nodeName", access = JsonProperty.Access.WRITE_ONLY) private String nodeName; /* * Machine name of the integration runtime node. */ - @JsonProperty(value = "machineName", access = JsonProperty.Access.WRITE_ONLY) private String machineName; /* * URI for the host machine of the integration runtime. */ - @JsonProperty(value = "hostServiceUri", access = JsonProperty.Access.WRITE_ONLY) private String hostServiceUri; /* * Status of the integration runtime node. */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private SelfHostedIntegrationRuntimeNodeStatus status; /* * The integration runtime capabilities dictionary */ - @JsonProperty(value = "capabilities", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map capabilities; /* * Status of the integration runtime node version. */ - @JsonProperty(value = "versionStatus", access = JsonProperty.Access.WRITE_ONLY) private String versionStatus; /* * Version of the integration runtime node. 
*/ - @JsonProperty(value = "version", access = JsonProperty.Access.WRITE_ONLY) private String version; /* * The time at which the integration runtime node was registered in ISO8601 format. */ - @JsonProperty(value = "registerTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime registerTime; /* * The most recent time at which the integration runtime was connected in ISO8601 format. */ - @JsonProperty(value = "lastConnectTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastConnectTime; /* * The time at which the integration runtime will expire in ISO8601 format. */ - @JsonProperty(value = "expiryTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime expiryTime; /* * The time the node last started up. */ - @JsonProperty(value = "lastStartTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastStartTime; /* * The integration runtime node last stop time. */ - @JsonProperty(value = "lastStopTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastStopTime; /* * The result of the last integration runtime node update. */ - @JsonProperty(value = "lastUpdateResult", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeUpdateResult lastUpdateResult; /* * The last time for the integration runtime node update start. */ - @JsonProperty(value = "lastStartUpdateTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastStartUpdateTime; /* * The last time for the integration runtime node update end. */ - @JsonProperty(value = "lastEndUpdateTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime lastEndUpdateTime; /* * Indicates whether this node is the active dispatcher for integration runtime requests. */ - @JsonProperty(value = "isActiveDispatcher", access = JsonProperty.Access.WRITE_ONLY) private Boolean isActiveDispatcher; /* * Maximum concurrent jobs on the integration runtime node. 
*/ - @JsonProperty(value = "concurrentJobsLimit", access = JsonProperty.Access.WRITE_ONLY) private Integer concurrentJobsLimit; /* * The maximum concurrent jobs in this integration runtime. */ - @JsonProperty(value = "maxConcurrentJobs", access = JsonProperty.Access.WRITE_ONLY) private Integer maxConcurrentJobs; /* * Properties of Self-hosted integration runtime node. */ - @JsonIgnore private Map additionalProperties; /** @@ -311,7 +293,6 @@ public Integer maxConcurrentJobs() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -327,14 +308,6 @@ public SelfHostedIntegrationRuntimeNodeInner withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -342,4 +315,98 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfHostedIntegrationRuntimeNodeInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfHostedIntegrationRuntimeNodeInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SelfHostedIntegrationRuntimeNodeInner. 
+ */ + public static SelfHostedIntegrationRuntimeNodeInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfHostedIntegrationRuntimeNodeInner deserializedSelfHostedIntegrationRuntimeNodeInner + = new SelfHostedIntegrationRuntimeNodeInner(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("nodeName".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.nodeName = reader.getString(); + } else if ("machineName".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.machineName = reader.getString(); + } else if ("hostServiceUri".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.hostServiceUri = reader.getString(); + } else if ("status".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.status + = SelfHostedIntegrationRuntimeNodeStatus.fromString(reader.getString()); + } else if ("capabilities".equals(fieldName)) { + Map capabilities = reader.readMap(reader1 -> reader1.getString()); + deserializedSelfHostedIntegrationRuntimeNodeInner.capabilities = capabilities; + } else if ("versionStatus".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.versionStatus = reader.getString(); + } else if ("version".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.version = reader.getString(); + } else if ("registerTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.registerTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastConnectTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastConnectTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("expiryTime".equals(fieldName)) { + 
deserializedSelfHostedIntegrationRuntimeNodeInner.expiryTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastStartTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastStartTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastStopTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastStopTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastUpdateResult".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastUpdateResult + = IntegrationRuntimeUpdateResult.fromString(reader.getString()); + } else if ("lastStartUpdateTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastStartUpdateTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastEndUpdateTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.lastEndUpdateTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("isActiveDispatcher".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.isActiveDispatcher + = reader.getNullable(JsonReader::getBoolean); + } else if ("concurrentJobsLimit".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.concurrentJobsLimit + = reader.getNullable(JsonReader::getInt); + } else if ("maxConcurrentJobs".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeNodeInner.maxConcurrentJobs + = reader.getNullable(JsonReader::getInt); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedSelfHostedIntegrationRuntimeNodeInner.additionalProperties = additionalProperties; + + return deserializedSelfHostedIntegrationRuntimeNodeInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java index 5f0ec2e89670..a874778b5106 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeStatusTypeProperties.java @@ -5,11 +5,15 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAutoUpdate; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode; import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntime; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.List; import java.util.Map; @@ -18,110 +22,93 @@ * Self-hosted integration runtime status type properties. 
*/ @Fluent -public final class SelfHostedIntegrationRuntimeStatusTypeProperties { +public final class SelfHostedIntegrationRuntimeStatusTypeProperties + implements JsonSerializable { /* * The time at which the integration runtime was created, in ISO8601 format. */ - @JsonProperty(value = "createTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime createTime; /* * The task queue id of the integration runtime. */ - @JsonProperty(value = "taskQueueId", access = JsonProperty.Access.WRITE_ONLY) private String taskQueueId; /* * It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted * integration runtime nodes exist). */ - @JsonProperty(value = "internalChannelEncryption", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeInternalChannelEncryptionMode internalChannelEncryption; /* * Version of the integration runtime. */ - @JsonProperty(value = "version", access = JsonProperty.Access.WRITE_ONLY) private String version; /* * The list of nodes for this integration runtime. */ - @JsonProperty(value = "nodes") private List nodes; /* * The date at which the integration runtime will be scheduled to update, in ISO8601 format. */ - @JsonProperty(value = "scheduledUpdateDate", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime scheduledUpdateDate; /* * The time in the date scheduled by service to update the integration runtime, e.g., PT03H is 3 hours */ - @JsonProperty(value = "updateDelayOffset", access = JsonProperty.Access.WRITE_ONLY) private String updateDelayOffset; /* * The local time zone offset in hours. */ - @JsonProperty(value = "localTimeZoneOffset", access = JsonProperty.Access.WRITE_ONLY) private String localTimeZoneOffset; /* * Object with additional information about integration runtime capabilities. 
*/ - @JsonProperty(value = "capabilities", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map capabilities; /* * The URLs for the services used in integration runtime backend service. */ - @JsonProperty(value = "serviceUrls", access = JsonProperty.Access.WRITE_ONLY) private List serviceUrls; /* * Whether Self-hosted integration runtime auto update has been turned on. */ - @JsonProperty(value = "autoUpdate", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeAutoUpdate autoUpdate; /* * Status of the integration runtime version. */ - @JsonProperty(value = "versionStatus", access = JsonProperty.Access.WRITE_ONLY) private String versionStatus; /* * The list of linked integration runtimes that are created to share with this integration runtime. */ - @JsonProperty(value = "links") private List links; /* * The version that the integration runtime is going to update to. */ - @JsonProperty(value = "pushedVersion", access = JsonProperty.Access.WRITE_ONLY) private String pushedVersion; /* * The latest version on download center. */ - @JsonProperty(value = "latestVersion", access = JsonProperty.Access.WRITE_ONLY) private String latestVersion; /* * The estimated time when the self-hosted integration runtime will be updated. */ - @JsonProperty(value = "autoUpdateETA", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime autoUpdateEta; /* * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is * unable to establish a connection with Azure Relay. 
*/ - @JsonProperty(value = "selfContainedInteractiveAuthoringEnabled", access = JsonProperty.Access.WRITE_ONLY) private Boolean selfContainedInteractiveAuthoringEnabled; /** @@ -325,4 +312,87 @@ public void validate() { links().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("nodes", this.nodes, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("links", this.links, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfHostedIntegrationRuntimeStatusTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfHostedIntegrationRuntimeStatusTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SelfHostedIntegrationRuntimeStatusTypeProperties. 
+ */ + public static SelfHostedIntegrationRuntimeStatusTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfHostedIntegrationRuntimeStatusTypeProperties deserializedSelfHostedIntegrationRuntimeStatusTypeProperties + = new SelfHostedIntegrationRuntimeStatusTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("createTime".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.createTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("taskQueueId".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.taskQueueId = reader.getString(); + } else if ("internalChannelEncryption".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.internalChannelEncryption + = IntegrationRuntimeInternalChannelEncryptionMode.fromString(reader.getString()); + } else if ("version".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.version = reader.getString(); + } else if ("nodes".equals(fieldName)) { + List nodes + = reader.readArray(reader1 -> SelfHostedIntegrationRuntimeNodeInner.fromJson(reader1)); + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.nodes = nodes; + } else if ("scheduledUpdateDate".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.scheduledUpdateDate = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("updateDelayOffset".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.updateDelayOffset = reader.getString(); + } else if ("localTimeZoneOffset".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.localTimeZoneOffset + = reader.getString(); + } else if 
("capabilities".equals(fieldName)) { + Map capabilities = reader.readMap(reader1 -> reader1.getString()); + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.capabilities = capabilities; + } else if ("serviceUrls".equals(fieldName)) { + List serviceUrls = reader.readArray(reader1 -> reader1.getString()); + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.serviceUrls = serviceUrls; + } else if ("autoUpdate".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.autoUpdate + = IntegrationRuntimeAutoUpdate.fromString(reader.getString()); + } else if ("versionStatus".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.versionStatus = reader.getString(); + } else if ("links".equals(fieldName)) { + List links + = reader.readArray(reader1 -> LinkedIntegrationRuntime.fromJson(reader1)); + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.links = links; + } else if ("pushedVersion".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.pushedVersion = reader.getString(); + } else if ("latestVersion".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.latestVersion = reader.getString(); + } else if ("autoUpdateETA".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.autoUpdateEta = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("selfContainedInteractiveAuthoringEnabled".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatusTypeProperties.selfContainedInteractiveAuthoringEnabled + = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedSelfHostedIntegrationRuntimeStatusTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java index 604f71739656..8912e4c9c6e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SelfHostedIntegrationRuntimeTypeProperties.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * The self-hosted integration runtime properties. */ @Fluent -public final class SelfHostedIntegrationRuntimeTypeProperties { +public final class SelfHostedIntegrationRuntimeTypeProperties + implements JsonSerializable { /* * The base definition of a linked integration runtime. */ - @JsonProperty(value = "linkedInfo") private LinkedIntegrationRuntimeType linkedInfo; /* * An alternative option to ensure interactive authoring function when your self-hosted integration runtime is * unable to establish a connection with Azure Relay. 
*/ - @JsonProperty(value = "selfContainedInteractiveAuthoringEnabled") private Boolean selfContainedInteractiveAuthoringEnabled; /** @@ -85,4 +88,47 @@ public void validate() { linkedInfo().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedInfo", this.linkedInfo); + jsonWriter.writeBooleanField("selfContainedInteractiveAuthoringEnabled", + this.selfContainedInteractiveAuthoringEnabled); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfHostedIntegrationRuntimeTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfHostedIntegrationRuntimeTypeProperties if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SelfHostedIntegrationRuntimeTypeProperties. 
+ */ + public static SelfHostedIntegrationRuntimeTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfHostedIntegrationRuntimeTypeProperties deserializedSelfHostedIntegrationRuntimeTypeProperties + = new SelfHostedIntegrationRuntimeTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedInfo".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeTypeProperties.linkedInfo + = LinkedIntegrationRuntimeType.fromJson(reader); + } else if ("selfContainedInteractiveAuthoringEnabled".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeTypeProperties.selfContainedInteractiveAuthoringEnabled + = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedSelfHostedIntegrationRuntimeTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java index f63d37879fc1..132797347587 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowLinkedServiceTypeProperties.java @@ -6,75 +6,70 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import 
com.azure.resourcemanager.datafactory.models.ServiceNowAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * ServiceNow server linked service properties. */ @Fluent -public final class ServiceNowLinkedServiceTypeProperties { +public final class ServiceNowLinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of the ServiceNow server. (i.e. .service-now.com) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The authentication type to use. */ - @JsonProperty(value = "authenticationType", required = true) private ServiceNowAuthenticationType authenticationType; /* * The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name for Basic and OAuth2 authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The client id for OAuth2 authentication. */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret for OAuth2 authentication. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -319,4 +314,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ServiceNowLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServiceNowLinkedServiceTypeProperties. 
+ */ + public static ServiceNowLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowLinkedServiceTypeProperties deserializedServiceNowLinkedServiceTypeProperties + = new ServiceNowLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.authenticationType + = ServiceNowAuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("clientId".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedServiceNowLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedServiceNowLinkedServiceTypeProperties; + }); + } 
} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java index 7101b1a02af2..e91cc006593e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServiceNowV2LinkedServiceTypeProperties.java @@ -6,62 +6,59 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.ServiceNowV2AuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * ServiceNowV2 server linked service properties. */ @Fluent -public final class ServiceNowV2LinkedServiceTypeProperties { +public final class ServiceNowV2LinkedServiceTypeProperties + implements JsonSerializable { /* * The endpoint of the ServiceNowV2 server. (i.e. .service-now.com) */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The authentication type to use. */ - @JsonProperty(value = "authenticationType", required = true) private ServiceNowV2AuthenticationType authenticationType; /* * The user name used to connect to the ServiceNowV2 server for Basic and OAuth2 authentication. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name for Basic and OAuth2 authentication. 
*/ - @JsonProperty(value = "password") private SecretBase password; /* * The client id for OAuth2 authentication. */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret for OAuth2 authentication. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * GrantType for OAuth2 authentication. Default value is password. */ - @JsonProperty(value = "grantType") private Object grantType; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -260,4 +257,65 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ServiceNowV2LinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("grantType", this.grantType); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowV2LinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowV2LinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServiceNowV2LinkedServiceTypeProperties. + */ + public static ServiceNowV2LinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowV2LinkedServiceTypeProperties deserializedServiceNowV2LinkedServiceTypeProperties + = new ServiceNowV2LinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("endpoint".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.authenticationType + = ServiceNowV2AuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("clientId".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("grantType".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.grantType = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedServiceNowV2LinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedServiceNowV2LinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServicePrincipalCredentialTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServicePrincipalCredentialTypeProperties.java index f99aa018ed32..8b64ef72b03a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServicePrincipalCredentialTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ServicePrincipalCredentialTypeProperties.java @@ -5,30 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Service Principal credential type properties. */ @Fluent -public final class ServicePrincipalCredentialTypeProperties { +public final class ServicePrincipalCredentialTypeProperties + implements JsonSerializable { /* * The app ID of the service principal used to authenticate */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate. 
*/ - @JsonProperty(value = "servicePrincipalKey") private AzureKeyVaultSecretReference servicePrincipalKey; /* * The ID of the tenant to which the service principal belongs */ - @JsonProperty(value = "tenant") private Object tenant; /** @@ -108,4 +110,48 @@ public void validate() { servicePrincipalKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServicePrincipalCredentialTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServicePrincipalCredentialTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ServicePrincipalCredentialTypeProperties. 
+ */ + public static ServicePrincipalCredentialTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServicePrincipalCredentialTypeProperties deserializedServicePrincipalCredentialTypeProperties + = new ServicePrincipalCredentialTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("servicePrincipalId".equals(fieldName)) { + deserializedServicePrincipalCredentialTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedServicePrincipalCredentialTypeProperties.servicePrincipalKey + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedServicePrincipalCredentialTypeProperties.tenant = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedServicePrincipalCredentialTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SetVariableActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SetVariableActivityTypeProperties.java index a07602cf4c9b..2930fd8efdf6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SetVariableActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SetVariableActivityTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * SetVariable activity properties. */ @Fluent -public final class SetVariableActivityTypeProperties { +public final class SetVariableActivityTypeProperties implements JsonSerializable { /* * Name of the variable whose value needs to be set. */ - @JsonProperty(value = "variableName") private String variableName; /* * Value to be set. Could be a static value or Expression. */ - @JsonProperty(value = "value") private Object value; /* * If set to true, it sets the pipeline run return value. */ - @JsonProperty(value = "setSystemVariable") private Boolean setSystemVariable; /** @@ -103,4 +104,48 @@ public SetVariableActivityTypeProperties withSetSystemVariable(Boolean setSystem */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("variableName", this.variableName); + jsonWriter.writeUntypedField("value", this.value); + jsonWriter.writeBooleanField("setSystemVariable", this.setSystemVariable); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SetVariableActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SetVariableActivityTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SetVariableActivityTypeProperties. 
+ */ + public static SetVariableActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SetVariableActivityTypeProperties deserializedSetVariableActivityTypeProperties + = new SetVariableActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("variableName".equals(fieldName)) { + deserializedSetVariableActivityTypeProperties.variableName = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedSetVariableActivityTypeProperties.value = reader.readUntyped(); + } else if ("setSystemVariable".equals(fieldName)) { + deserializedSetVariableActivityTypeProperties.setSystemVariable + = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedSetVariableActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java index 14c4d51a3080..ec7c623d62f2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SftpServerLinkedServiceTypeProperties.java @@ -6,51 +6,50 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import 
com.azure.resourcemanager.datafactory.models.SftpAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Properties specific to this linked service type. */ @Fluent -public final class SftpServerLinkedServiceTypeProperties { +public final class SftpServerLinkedServiceTypeProperties + implements JsonSerializable { /* * The SFTP server host name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: * integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "port") private Object port; /* * The authentication type to be used to connect to the FTP server. */ - @JsonProperty(value = "authenticationType") private SftpAuthenticationType authenticationType; /* * The username used to log on to the SFTP server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * Password to logon the SFTP server for Basic authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* @@ -58,7 +57,6 @@ public final class SftpServerLinkedServiceTypeProperties { * copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH * private key should be OpenSSH format. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "privateKeyPath") private Object privateKeyPath; /* @@ -66,27 +64,23 @@ public final class SftpServerLinkedServiceTypeProperties { * authentication, either PrivateKeyPath or PrivateKeyContent should be specified. 
SSH private key should be OpenSSH * format. */ - @JsonProperty(value = "privateKeyContent") private SecretBase privateKeyContent; /* * The password to decrypt the SSH private key if the SSH private key is encrypted. */ - @JsonProperty(value = "passPhrase") private SecretBase passPhrase; /* * If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "skipHostKeyValidation") private Object skipHostKeyValidation; /* * The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be * specified. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "hostKeyFingerprint") private Object hostKeyFingerprint; /** @@ -358,4 +352,74 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SftpServerLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("privateKeyPath", this.privateKeyPath); + jsonWriter.writeJsonField("privateKeyContent", this.privateKeyContent); + jsonWriter.writeJsonField("passPhrase", this.passPhrase); + jsonWriter.writeUntypedField("skipHostKeyValidation", this.skipHostKeyValidation); + jsonWriter.writeUntypedField("hostKeyFingerprint", this.hostKeyFingerprint); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SftpServerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SftpServerLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SftpServerLinkedServiceTypeProperties. 
+ */ + public static SftpServerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SftpServerLinkedServiceTypeProperties deserializedSftpServerLinkedServiceTypeProperties + = new SftpServerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.authenticationType + = SftpAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("privateKeyPath".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.privateKeyPath = reader.readUntyped(); + } else if ("privateKeyContent".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.privateKeyContent = SecretBase.fromJson(reader); + } else if ("passPhrase".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.passPhrase = SecretBase.fromJson(reader); + } else if ("skipHostKeyValidation".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.skipHostKeyValidation = reader.readUntyped(); + } else if ("hostKeyFingerprint".equals(fieldName)) { + deserializedSftpServerLinkedServiceTypeProperties.hostKeyFingerprint = reader.readUntyped(); 
+ } else { + reader.skipChildren(); + } + } + + return deserializedSftpServerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListDatasetTypeProperties.java index 40c8768afe0e..187b43a590d6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListDatasetTypeProperties.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sharepoint online list dataset properties. */ @Fluent -public final class SharePointOnlineListDatasetTypeProperties { +public final class SharePointOnlineListDatasetTypeProperties + implements JsonSerializable { /* * The name of the SharePoint Online list. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "listName") private Object listName; /** @@ -53,4 +57,41 @@ public SharePointOnlineListDatasetTypeProperties withListName(Object listName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("listName", this.listName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SharePointOnlineListDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SharePointOnlineListDatasetTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SharePointOnlineListDatasetTypeProperties. + */ + public static SharePointOnlineListDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SharePointOnlineListDatasetTypeProperties deserializedSharePointOnlineListDatasetTypeProperties + = new SharePointOnlineListDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("listName".equals(fieldName)) { + deserializedSharePointOnlineListDatasetTypeProperties.listName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSharePointOnlineListDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java index d06002df6871..151524feba4e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SharePointOnlineListLinkedServiceTypeProperties.java @@ -6,47 +6,47 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SharePoint Online List linked service properties. */ @Fluent -public final class SharePointOnlineListLinkedServiceTypeProperties { +public final class SharePointOnlineListLinkedServiceTypeProperties + implements JsonSerializable { /* * The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "siteUrl", required = true) private Object siteUrl; /* * The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview * page. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tenantId", required = true) private Object tenantId; /* * The application (client) ID of your application registered in Azure Active Directory. Make sure to grant * SharePoint site permission to this application. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId", required = true) private Object servicePrincipalId; /* * The client secret of your application registered in Azure Active Directory. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "servicePrincipalKey", required = true) private SecretBase servicePrincipalKey; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -198,4 +198,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SharePointOnlineListLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("siteUrl", this.siteUrl); + jsonWriter.writeUntypedField("tenantId", this.tenantId); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SharePointOnlineListLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SharePointOnlineListLinkedServiceTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SharePointOnlineListLinkedServiceTypeProperties. 
+ */ + public static SharePointOnlineListLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SharePointOnlineListLinkedServiceTypeProperties deserializedSharePointOnlineListLinkedServiceTypeProperties + = new SharePointOnlineListLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("siteUrl".equals(fieldName)) { + deserializedSharePointOnlineListLinkedServiceTypeProperties.siteUrl = reader.readUntyped(); + } else if ("tenantId".equals(fieldName)) { + deserializedSharePointOnlineListLinkedServiceTypeProperties.tenantId = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedSharePointOnlineListLinkedServiceTypeProperties.servicePrincipalId + = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedSharePointOnlineListLinkedServiceTypeProperties.servicePrincipalKey + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSharePointOnlineListLinkedServiceTypeProperties.encryptedCredential + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSharePointOnlineListLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java index 1c29942d242c..fd30224ecd00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ShopifyLinkedServiceTypeProperties.java @@ -6,50 +6,48 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Shopify Service linked service properties. */ @Fluent -public final class ShopifyLinkedServiceTypeProperties { +public final class ShopifyLinkedServiceTypeProperties implements JsonSerializable { /* * The endpoint of the Shopify server. (i.e. mystore.myshopify.com) */ - @JsonProperty(value = "host", required = true) private Object host; /* * The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. */ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -205,4 +203,57 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ShopifyLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ShopifyLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ShopifyLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ShopifyLinkedServiceTypeProperties. 
+ */ + public static ShopifyLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ShopifyLinkedServiceTypeProperties deserializedShopifyLinkedServiceTypeProperties + = new ShopifyLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("accessToken".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedShopifyLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedShopifyLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java index a3221f0a7675..9cfc0091b4e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SmartsheetLinkedServiceTypeProperties.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Smartsheet linked service type properties. */ @Fluent -public final class SmartsheetLinkedServiceTypeProperties { +public final class SmartsheetLinkedServiceTypeProperties + implements JsonSerializable { /* * The api token for the Smartsheet source. */ - @JsonProperty(value = "apiToken", required = true) private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -91,4 +94,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SmartsheetLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SmartsheetLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SmartsheetLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SmartsheetLinkedServiceTypeProperties. + */ + public static SmartsheetLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SmartsheetLinkedServiceTypeProperties deserializedSmartsheetLinkedServiceTypeProperties + = new SmartsheetLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("apiToken".equals(fieldName)) { + deserializedSmartsheetLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSmartsheetLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSmartsheetLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeDatasetTypeProperties.java index 2299c9b1e7c5..0a5691214142 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeDatasetTypeProperties.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * Snowflake dataset properties. */ @Fluent -public final class SnowflakeDatasetTypeProperties { +public final class SnowflakeDatasetTypeProperties implements JsonSerializable { /* * The schema name of the Snowflake database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the Snowflake database. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +83,44 @@ public SnowflakeDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SnowflakeDatasetTypeProperties. 
+ */ + public static SnowflakeDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeDatasetTypeProperties deserializedSnowflakeDatasetTypeProperties + = new SnowflakeDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedSnowflakeDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedSnowflakeDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSnowflakeDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java index 533a2c5a9870..563c29aba543 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedServiceTypeProperties.java @@ -6,31 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Snowflake linked service properties. 
*/ @Fluent -public final class SnowflakeLinkedServiceTypeProperties { +public final class SnowflakeLinkedServiceTypeProperties + implements JsonSerializable { /* * The connection string of snowflake. Type: string, SecureString. */ - @JsonProperty(value = "connectionString", required = true) private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private AzureKeyVaultSecretReference password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -118,4 +120,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeLinkedServiceTypeProperties. 
+ */ + public static SnowflakeLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeLinkedServiceTypeProperties deserializedSnowflakeLinkedServiceTypeProperties + = new SnowflakeLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedSnowflakeLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSnowflakeLinkedServiceTypeProperties.password + = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSnowflakeLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSnowflakeLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java index 22e7b68f8d9c..4705f7807303 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SnowflakeLinkedV2ServiceTypeProperties.java @@ -6,92 +6,84 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; 
import com.azure.resourcemanager.datafactory.models.SnowflakeAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Snowflake linked service properties. */ @Fluent -public final class SnowflakeLinkedV2ServiceTypeProperties { +public final class SnowflakeLinkedV2ServiceTypeProperties + implements JsonSerializable { /* * The account identifier of your Snowflake account, e.g. xy12345.east-us-2.azure */ - @JsonProperty(value = "accountIdentifier", required = true) private Object accountIdentifier; /* * The name of the Snowflake user. */ - @JsonProperty(value = "user") private Object user; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "password") private SecretBase password; /* * The name of the Snowflake database. */ - @JsonProperty(value = "database", required = true) private Object database; /* * The name of the Snowflake warehouse. */ - @JsonProperty(value = "warehouse", required = true) private Object warehouse; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private SnowflakeAuthenticationType authenticationType; /* * The client ID of the application registered in Azure Active Directory for AADServicePrincipal authentication. */ - @JsonProperty(value = "clientId") private Object clientId; /* * The Azure key vault secret reference of client secret for AADServicePrincipal authentication. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The tenant ID of the application registered in Azure Active Directory for AADServicePrincipal authentication. */ - @JsonProperty(value = "tenantId") private Object tenantId; /* * The scope of the application registered in Azure Active Directory for AADServicePrincipal authentication. */ - @JsonProperty(value = "scope") private Object scope; /* * The Azure key vault secret reference of privateKey for KeyPair auth. 
*/ - @JsonProperty(value = "privateKey") private SecretBase privateKey; /* * The Azure key vault secret reference of private key password for KeyPair auth with encrypted private key. */ - @JsonProperty(value = "privateKeyPassphrase") private SecretBase privateKeyPassphrase; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -411,4 +403,81 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeLinkedV2ServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("accountIdentifier", this.accountIdentifier); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("warehouse", this.warehouse); + jsonWriter.writeUntypedField("user", this.user); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("tenantId", this.tenantId); + jsonWriter.writeUntypedField("scope", this.scope); + jsonWriter.writeJsonField("privateKey", this.privateKey); + jsonWriter.writeJsonField("privateKeyPassphrase", this.privateKeyPassphrase); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeLinkedV2ServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SnowflakeLinkedV2ServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeLinkedV2ServiceTypeProperties. + */ + public static SnowflakeLinkedV2ServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeLinkedV2ServiceTypeProperties deserializedSnowflakeLinkedV2ServiceTypeProperties + = new SnowflakeLinkedV2ServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountIdentifier".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.accountIdentifier = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.database = reader.readUntyped(); + } else if ("warehouse".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.warehouse = reader.readUntyped(); + } else if ("user".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.user = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("authenticationType".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.authenticationType + = SnowflakeAuthenticationType.fromString(reader.getString()); + } else if ("clientId".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("tenantId".equals(fieldName)) { + 
deserializedSnowflakeLinkedV2ServiceTypeProperties.tenantId = reader.readUntyped(); + } else if ("scope".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.scope = reader.readUntyped(); + } else if ("privateKey".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.privateKey = SecretBase.fromJson(reader); + } else if ("privateKeyPassphrase".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.privateKeyPassphrase + = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSnowflakeLinkedV2ServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSnowflakeLinkedV2ServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkDatasetTypeProperties.java index 79cc10a18d87..1045f8917994 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Spark Properties. */ @Fluent -public final class SparkDatasetTypeProperties { +public final class SparkDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. 
Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Spark. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Spark. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public SparkDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SparkDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SparkDatasetTypeProperties. 
+ */ + public static SparkDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkDatasetTypeProperties deserializedSparkDatasetTypeProperties = new SparkDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedSparkDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedSparkDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedSparkDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSparkDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java index 3a8d1cb27ead..44c8c39427cc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SparkLinkedServiceTypeProperties.java @@ -6,69 +6,64 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SparkAuthenticationType; import com.azure.resourcemanager.datafactory.models.SparkServerType; import 
com.azure.resourcemanager.datafactory.models.SparkThriftTransportProtocol; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Spark Server linked service properties. */ @Fluent -public final class SparkLinkedServiceTypeProperties { +public final class SparkLinkedServiceTypeProperties implements JsonSerializable { /* * IP address or host name of the Spark server */ - @JsonProperty(value = "host", required = true) private Object host; /* * The TCP port that the Spark server uses to listen for client connections. */ - @JsonProperty(value = "port", required = true) private Object port; /* * The type of Spark server. */ - @JsonProperty(value = "serverType") private SparkServerType serverType; /* * The transport protocol to use in the Thrift layer. */ - @JsonProperty(value = "thriftTransportProtocol") private SparkThriftTransportProtocol thriftTransportProtocol; /* * The authentication method used to access the Spark server. */ - @JsonProperty(value = "authenticationType", required = true) private SparkAuthenticationType authenticationType; /* * The user name that you use to access Spark Server. */ - @JsonProperty(value = "username") private Object username; /* * The password corresponding to the user name that you provided in the Username field */ - @JsonProperty(value = "password") private SecretBase password; /* * The partial URL corresponding to the Spark server. */ - @JsonProperty(value = "httpPath") private Object httpPath; /* * Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - @JsonProperty(value = "enableSsl") private Object enableSsl; /* @@ -76,34 +71,29 @@ public final class SparkLinkedServiceTypeProperties { * SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file * installed with the IR. 
*/ - @JsonProperty(value = "trustedCertPath") private Object trustedCertPath; /* * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default * value is false. */ - @JsonProperty(value = "useSystemTrustStore") private Object useSystemTrustStore; /* * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when * connecting over SSL. The default value is false. */ - @JsonProperty(value = "allowHostNameCNMismatch") private Object allowHostnameCNMismatch; /* * Specifies whether to allow self-signed certificates from the server. The default value is false. */ - @JsonProperty(value = "allowSelfSignedServerCert") private Object allowSelfSignedServerCert; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -434,4 +424,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SparkLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("port", this.port); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeStringField("serverType", this.serverType == null ? null : this.serverType.toString()); + jsonWriter.writeStringField("thriftTransportProtocol", + this.thriftTransportProtocol == null ? 
null : this.thriftTransportProtocol.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("httpPath", this.httpPath); + jsonWriter.writeUntypedField("enableSsl", this.enableSsl); + jsonWriter.writeUntypedField("trustedCertPath", this.trustedCertPath); + jsonWriter.writeUntypedField("useSystemTrustStore", this.useSystemTrustStore); + jsonWriter.writeUntypedField("allowHostNameCNMismatch", this.allowHostnameCNMismatch); + jsonWriter.writeUntypedField("allowSelfSignedServerCert", this.allowSelfSignedServerCert); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SparkLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SparkLinkedServiceTypeProperties. 
+ */ + public static SparkLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkLinkedServiceTypeProperties deserializedSparkLinkedServiceTypeProperties + = new SparkLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("host".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("port".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.port = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.authenticationType + = SparkAuthenticationType.fromString(reader.getString()); + } else if ("serverType".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.serverType + = SparkServerType.fromString(reader.getString()); + } else if ("thriftTransportProtocol".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.thriftTransportProtocol + = SparkThriftTransportProtocol.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("httpPath".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.httpPath = reader.readUntyped(); + } else if ("enableSsl".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.enableSsl = reader.readUntyped(); + } else if ("trustedCertPath".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.trustedCertPath = reader.readUntyped(); + } else if ("useSystemTrustStore".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.useSystemTrustStore = reader.readUntyped(); + } else if 
("allowHostNameCNMismatch".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.allowHostnameCNMismatch = reader.readUntyped(); + } else if ("allowSelfSignedServerCert".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.allowSelfSignedServerCert = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSparkLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSparkLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java index 77e568d1c78c..990d5cd8a59e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerLinkedServiceTypeProperties.java @@ -5,12 +5,15 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.CredentialReference; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedProperties; import com.azure.resourcemanager.datafactory.models.SqlServerAuthenticationType; import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SQL Server linked service 
properties. @@ -20,44 +23,37 @@ public final class SqlServerLinkedServiceTypeProperties extends SqlServerBaseLin /* * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The type used for authentication. Type: string. */ - @JsonProperty(value = "authenticationType") private SqlServerAuthenticationType authenticationType; /* * The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The on-premises Windows authentication password. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * Sql always encrypted properties. */ - @JsonProperty(value = "alwaysEncryptedSettings") private SqlAlwaysEncryptedProperties alwaysEncryptedSettings; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -402,4 +398,119 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", server()); + jsonWriter.writeUntypedField("database", database()); + jsonWriter.writeUntypedField("encrypt", encrypt()); + jsonWriter.writeUntypedField("trustServerCertificate", trustServerCertificate()); + jsonWriter.writeUntypedField("hostNameInCertificate", hostnameInCertificate()); + jsonWriter.writeUntypedField("applicationIntent", applicationIntent()); + jsonWriter.writeUntypedField("connectTimeout", connectTimeout()); + jsonWriter.writeUntypedField("connectRetryCount", connectRetryCount()); + jsonWriter.writeUntypedField("connectRetryInterval", connectRetryInterval()); + jsonWriter.writeUntypedField("loadBalanceTimeout", loadBalanceTimeout()); + jsonWriter.writeUntypedField("commandTimeout", commandTimeout()); + jsonWriter.writeUntypedField("integratedSecurity", integratedSecurity()); + jsonWriter.writeUntypedField("failoverPartner", failoverPartner()); + jsonWriter.writeUntypedField("maxPoolSize", maxPoolSize()); + jsonWriter.writeUntypedField("minPoolSize", minPoolSize()); + jsonWriter.writeUntypedField("multipleActiveResultSets", multipleActiveResultSets()); + jsonWriter.writeUntypedField("multiSubnetFailover", multiSubnetFailover()); + jsonWriter.writeUntypedField("packetSize", packetSize()); + jsonWriter.writeUntypedField("pooling", pooling()); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeJsonField("alwaysEncryptedSettings", this.alwaysEncryptedSettings); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlServerLinkedServiceTypeProperties. + */ + public static SqlServerLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerLinkedServiceTypeProperties deserializedSqlServerLinkedServiceTypeProperties + = new SqlServerLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withServer(reader.readUntyped()); + } else if ("database".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withDatabase(reader.readUntyped()); + } else if ("encrypt".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withEncrypt(reader.readUntyped()); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withTrustServerCertificate(reader.readUntyped()); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withHostnameInCertificate(reader.readUntyped()); + } else if ("applicationIntent".equals(fieldName)) { 
+ deserializedSqlServerLinkedServiceTypeProperties.withApplicationIntent(reader.readUntyped()); + } else if ("connectTimeout".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withConnectTimeout(reader.readUntyped()); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withConnectRetryCount(reader.readUntyped()); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withConnectRetryInterval(reader.readUntyped()); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withLoadBalanceTimeout(reader.readUntyped()); + } else if ("commandTimeout".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withCommandTimeout(reader.readUntyped()); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withIntegratedSecurity(reader.readUntyped()); + } else if ("failoverPartner".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withFailoverPartner(reader.readUntyped()); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withMaxPoolSize(reader.readUntyped()); + } else if ("minPoolSize".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withMinPoolSize(reader.readUntyped()); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withMultipleActiveResultSets(reader.readUntyped()); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withMultiSubnetFailover(reader.readUntyped()); + } else if ("packetSize".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withPacketSize(reader.readUntyped()); + } else if ("pooling".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.withPooling(reader.readUntyped()); 
+ } else if ("connectionString".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.authenticationType + = SqlServerAuthenticationType.fromString(reader.getString()); + } else if ("userName".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("alwaysEncryptedSettings".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.alwaysEncryptedSettings + = SqlAlwaysEncryptedProperties.fromJson(reader); + } else if ("credential".equals(fieldName)) { + deserializedSqlServerLinkedServiceTypeProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlServerLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java index c3327e1ef423..b99f01e1c67e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerStoredProcedureActivityTypeProperties.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; 
import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SQL stored procedure activity properties. */ @Fluent -public final class SqlServerStoredProcedureActivityTypeProperties { +public final class SqlServerStoredProcedureActivityTypeProperties + implements JsonSerializable { /* * Stored procedure name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "storedProcedureName", required = true) private Object storedProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /** @@ -88,4 +91,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SqlServerStoredProcedureActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("storedProcedureName", this.storedProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerStoredProcedureActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerStoredProcedureActivityTypeProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SqlServerStoredProcedureActivityTypeProperties. 
+ */ + public static SqlServerStoredProcedureActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerStoredProcedureActivityTypeProperties deserializedSqlServerStoredProcedureActivityTypeProperties + = new SqlServerStoredProcedureActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("storedProcedureName".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivityTypeProperties.storedProcedureName + = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivityTypeProperties.storedProcedureParameters + = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlServerStoredProcedureActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerTableDatasetTypeProperties.java index b14fc349d181..eda34a65682a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SqlServerTableDatasetTypeProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * On-premises SQL Server dataset 
properties. */ @Fluent -public final class SqlServerTableDatasetTypeProperties { +public final class SqlServerTableDatasetTypeProperties + implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -109,4 +111,47 @@ public SqlServerTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlServerTableDatasetTypeProperties. 
+ */ + public static SqlServerTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerTableDatasetTypeProperties deserializedSqlServerTableDatasetTypeProperties + = new SqlServerTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedSqlServerTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedSqlServerTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedSqlServerTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlServerTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java index c619b7393ebb..c63279fd5929 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SquareLinkedServiceTypeProperties.java @@ -5,69 +5,64 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * 
Square Service linked service properties. */ @Fluent -public final class SquareLinkedServiceTypeProperties { +public final class SquareLinkedServiceTypeProperties implements JsonSerializable { /* * Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. * Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The URL of the Square instance. (i.e. mystore.mysquare.com) */ - @JsonProperty(value = "host") private Object host; /* * The client ID associated with your Square application. */ - @JsonProperty(value = "clientId") private Object clientId; /* * The client secret associated with your Square application. */ - @JsonProperty(value = "clientSecret") private SecretBase clientSecret; /* * The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) */ - @JsonProperty(value = "redirectUri") private Object redirectUri; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -278,4 +273,65 @@ public void validate() { clientSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeUntypedField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + jsonWriter.writeUntypedField("redirectUri", this.redirectUri); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SquareLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SquareLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SquareLinkedServiceTypeProperties. 
+ */ + public static SquareLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SquareLinkedServiceTypeProperties deserializedSquareLinkedServiceTypeProperties + = new SquareLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("host".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("clientId".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.clientId = reader.readUntyped(); + } else if ("clientSecret".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.clientSecret = SecretBase.fromJson(reader); + } else if ("redirectUri".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.redirectUri = reader.readUntyped(); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSquareLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSquareLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java index 44552374e318..1b50ec67a0de 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisLogLocationTypeProperties.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SsisAccessCredential; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SSIS package execution log location properties. */ @Fluent -public final class SsisLogLocationTypeProperties { +public final class SsisLogLocationTypeProperties implements JsonSerializable { /* * The package execution log access credential. */ - @JsonProperty(value = "accessCredential") private SsisAccessCredential accessCredential; /* * Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ - @JsonProperty(value = "logRefreshInterval") private Object logRefreshInterval; /** @@ -86,4 +88,44 @@ public void validate() { accessCredential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("accessCredential", this.accessCredential); + jsonWriter.writeUntypedField("logRefreshInterval", this.logRefreshInterval); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisLogLocationTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisLogLocationTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisLogLocationTypeProperties. + */ + public static SsisLogLocationTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisLogLocationTypeProperties deserializedSsisLogLocationTypeProperties + = new SsisLogLocationTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accessCredential".equals(fieldName)) { + deserializedSsisLogLocationTypeProperties.accessCredential = SsisAccessCredential.fromJson(reader); + } else if ("logRefreshInterval".equals(fieldName)) { + deserializedSsisLogLocationTypeProperties.logRefreshInterval = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisLogLocationTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataListResponseInner.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataListResponseInner.java index 5189287b6e5d..f953ef0f37ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataListResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataListResponseInner.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SsisObjectMetadata; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of SSIS object metadata. */ @Fluent -public final class SsisObjectMetadataListResponseInner { +public final class SsisObjectMetadataListResponseInner + implements JsonSerializable { /* * List of SSIS object metadata. */ - @JsonProperty(value = "value") private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -82,4 +85,45 @@ public void validate() { value().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisObjectMetadataListResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SsisObjectMetadataListResponseInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisObjectMetadataListResponseInner. + */ + public static SsisObjectMetadataListResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisObjectMetadataListResponseInner deserializedSsisObjectMetadataListResponseInner + = new SsisObjectMetadataListResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> SsisObjectMetadata.fromJson(reader1)); + deserializedSsisObjectMetadataListResponseInner.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedSsisObjectMetadataListResponseInner.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisObjectMetadataListResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataStatusResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataStatusResponseInner.java index 1fda656690bc..662ad184c9b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataStatusResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisObjectMetadataStatusResponseInner.java @@ -5,35 +5,36 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The status of the operation. */ @Fluent -public final class SsisObjectMetadataStatusResponseInner { +public final class SsisObjectMetadataStatusResponseInner + implements JsonSerializable { /* * The status of the operation. */ - @JsonProperty(value = "status") private String status; /* * The operation name. */ - @JsonProperty(value = "name") private String name; /* * The operation properties. */ - @JsonProperty(value = "properties") private String properties; /* * The operation error message. */ - @JsonProperty(value = "error") private String error; /** @@ -129,4 +130,50 @@ public SsisObjectMetadataStatusResponseInner withError(String error) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("status", this.status); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("properties", this.properties); + jsonWriter.writeStringField("error", this.error); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisObjectMetadataStatusResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisObjectMetadataStatusResponseInner if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisObjectMetadataStatusResponseInner. 
+ */ + public static SsisObjectMetadataStatusResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisObjectMetadataStatusResponseInner deserializedSsisObjectMetadataStatusResponseInner + = new SsisObjectMetadataStatusResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("status".equals(fieldName)) { + deserializedSsisObjectMetadataStatusResponseInner.status = reader.getString(); + } else if ("name".equals(fieldName)) { + deserializedSsisObjectMetadataStatusResponseInner.name = reader.getString(); + } else if ("properties".equals(fieldName)) { + deserializedSsisObjectMetadataStatusResponseInner.properties = reader.getString(); + } else if ("error".equals(fieldName)) { + deserializedSsisObjectMetadataStatusResponseInner.error = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisObjectMetadataStatusResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisPackageLocationTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisPackageLocationTypeProperties.java index 540b2d6438c1..9d629f978233 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisPackageLocationTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SsisPackageLocationTypeProperties.java @@ -5,63 +5,59 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.SsisAccessCredential; import com.azure.resourcemanager.datafactory.models.SsisChildPackage; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * SSIS package location properties. */ @Fluent -public final class SsisPackageLocationTypeProperties { +public final class SsisPackageLocationTypeProperties implements JsonSerializable { /* * Password of the package. */ - @JsonProperty(value = "packagePassword") private SecretBase packagePassword; /* * The package access credential. */ - @JsonProperty(value = "accessCredential") private SsisAccessCredential accessCredential; /* * The configuration file of the package execution. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "configurationPath") private Object configurationPath; /* * The configuration file access credential. */ - @JsonProperty(value = "configurationAccessCredential") private SsisAccessCredential configurationAccessCredential; /* * The package name. */ - @JsonProperty(value = "packageName") private String packageName; /* * The embedded package content. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "packageContent") private Object packageContent; /* * The embedded package last modified date. */ - @JsonProperty(value = "packageLastModifiedDate") private String packageLastModifiedDate; /* * The embedded child package list. 
*/ - @JsonProperty(value = "childPackages") private List childPackages; /** @@ -254,4 +250,66 @@ public void validate() { childPackages().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("packagePassword", this.packagePassword); + jsonWriter.writeJsonField("accessCredential", this.accessCredential); + jsonWriter.writeUntypedField("configurationPath", this.configurationPath); + jsonWriter.writeJsonField("configurationAccessCredential", this.configurationAccessCredential); + jsonWriter.writeStringField("packageName", this.packageName); + jsonWriter.writeUntypedField("packageContent", this.packageContent); + jsonWriter.writeStringField("packageLastModifiedDate", this.packageLastModifiedDate); + jsonWriter.writeArrayField("childPackages", this.childPackages, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisPackageLocationTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisPackageLocationTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisPackageLocationTypeProperties. 
+ */ + public static SsisPackageLocationTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisPackageLocationTypeProperties deserializedSsisPackageLocationTypeProperties + = new SsisPackageLocationTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("packagePassword".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.packagePassword = SecretBase.fromJson(reader); + } else if ("accessCredential".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.accessCredential + = SsisAccessCredential.fromJson(reader); + } else if ("configurationPath".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.configurationPath = reader.readUntyped(); + } else if ("configurationAccessCredential".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.configurationAccessCredential + = SsisAccessCredential.fromJson(reader); + } else if ("packageName".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.packageName = reader.getString(); + } else if ("packageContent".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.packageContent = reader.readUntyped(); + } else if ("packageLastModifiedDate".equals(fieldName)) { + deserializedSsisPackageLocationTypeProperties.packageLastModifiedDate = reader.getString(); + } else if ("childPackages".equals(fieldName)) { + List childPackages + = reader.readArray(reader1 -> SsisChildPackage.fromJson(reader1)); + deserializedSsisPackageLocationTypeProperties.childPackages = childPackages; + } else { + reader.skipChildren(); + } + } + + return deserializedSsisPackageLocationTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java index 13e50cb039ae..31f27d7a69e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SwitchActivityTypeProperties.java @@ -6,36 +6,37 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.Expression; import com.azure.resourcemanager.datafactory.models.SwitchCase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Switch activity properties. */ @Fluent -public final class SwitchActivityTypeProperties { +public final class SwitchActivityTypeProperties implements JsonSerializable { /* * An expression that would evaluate to a string or integer. This is used to determine the block of activities in * cases that will be executed. */ - @JsonProperty(value = "on", required = true) private Expression on; /* * List of cases that correspond to expected values of the 'on' property. This is an optional property and if not * provided, the activity will execute activities provided in defaultActivities. */ - @JsonProperty(value = "cases") private List cases; /* * List of activities to execute if no case condition is satisfied. This is an optional property and if not * provided, the activity will exit without any action. 
*/ - @JsonProperty(value = "defaultActivities") private List defaultActivities; /** @@ -132,4 +133,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SwitchActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("on", this.on); + jsonWriter.writeArrayField("cases", this.cases, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("defaultActivities", this.defaultActivities, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SwitchActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SwitchActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SwitchActivityTypeProperties. 
+ */ + public static SwitchActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SwitchActivityTypeProperties deserializedSwitchActivityTypeProperties = new SwitchActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("on".equals(fieldName)) { + deserializedSwitchActivityTypeProperties.on = Expression.fromJson(reader); + } else if ("cases".equals(fieldName)) { + List cases = reader.readArray(reader1 -> SwitchCase.fromJson(reader1)); + deserializedSwitchActivityTypeProperties.cases = cases; + } else if ("defaultActivities".equals(fieldName)) { + List defaultActivities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedSwitchActivityTypeProperties.defaultActivities = defaultActivities; + } else { + reader.skipChildren(); + } + } + + return deserializedSwitchActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java index 833388a56fe8..9c10577fcc75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseLinkedServiceTypeProperties.java @@ -6,56 +6,53 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; 
import com.azure.resourcemanager.datafactory.models.SybaseAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Sybase linked service properties. */ @Fluent -public final class SybaseLinkedServiceTypeProperties { +public final class SybaseLinkedServiceTypeProperties implements JsonSerializable { /* * Server name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server", required = true) private Object server; /* * Database name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database", required = true) private Object database; /* * Schema name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * AuthenticationType to be used for connection. */ - @JsonProperty(value = "authenticationType") private SybaseAuthenticationType authenticationType; /* * Username for authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -228,4 +225,62 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SybaseLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SybaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SybaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SybaseLinkedServiceTypeProperties. 
+ */ + public static SybaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SybaseLinkedServiceTypeProperties deserializedSybaseLinkedServiceTypeProperties + = new SybaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.schema = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.authenticationType + = SybaseAuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedSybaseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSybaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseTableDatasetTypeProperties.java index 08d3db009e9a..fbeccdd2d4e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseTableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SybaseTableDatasetTypeProperties.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sybase table dataset properties. */ @Fluent -public final class SybaseTableDatasetTypeProperties { +public final class SybaseTableDatasetTypeProperties implements JsonSerializable { /* * The Sybase table name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "tableName") private Object tableName; /** @@ -51,4 +54,41 @@ public SybaseTableDatasetTypeProperties withTableName(Object tableName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SybaseTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SybaseTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SybaseTableDatasetTypeProperties. 
+ */ + public static SybaseTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SybaseTableDatasetTypeProperties deserializedSybaseTableDatasetTypeProperties + = new SybaseTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedSybaseTableDatasetTypeProperties.tableName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSybaseTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java index 3a812fcd9010..c03762d09142 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java @@ -6,37 +6,37 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; import com.azure.resourcemanager.datafactory.models.ConfigurationType; import com.azure.resourcemanager.datafactory.models.NotebookParameter; import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference; import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference; -import 
com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Execute Synapse notebook activity properties. */ @Fluent -public final class SynapseNotebookActivityTypeProperties { +public final class SynapseNotebookActivityTypeProperties + implements JsonSerializable { /* * Synapse notebook reference. */ - @JsonProperty(value = "notebook", required = true) private SynapseNotebookReference notebook; /* * The name of the big data pool which will be used to execute the notebook. */ - @JsonProperty(value = "sparkPool") private BigDataPoolParametrizationReference sparkPool; /* * Notebook parameters. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* @@ -44,13 +44,11 @@ public final class SynapseNotebookActivityTypeProperties { * will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "executorSize") private Object executorSize; /* * Spark configuration properties, which will override the 'conf' of the notebook you provide. */ - @JsonProperty(value = "conf") private Object conf; /* @@ -58,33 +56,27 @@ public final class SynapseNotebookActivityTypeProperties { * be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "driverSize") private Object driverSize; /* * Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you * provide. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "numExecutors") private Object numExecutors; /* * The type of the spark config. 
*/ - @JsonProperty(value = "configurationType") private ConfigurationType configurationType; /* * The spark configuration of the spark job. */ - @JsonProperty(value = "targetSparkConfiguration") private SparkConfigurationParametrizationReference targetSparkConfiguration; /* * Spark configuration property. */ - @JsonProperty(value = "sparkConfig") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map sparkConfig; /** @@ -335,4 +327,77 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("notebook", this.notebook); + jsonWriter.writeJsonField("sparkPool", this.sparkPool); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeUntypedField("executorSize", this.executorSize); + jsonWriter.writeUntypedField("conf", this.conf); + jsonWriter.writeUntypedField("driverSize", this.driverSize); + jsonWriter.writeUntypedField("numExecutors", this.numExecutors); + jsonWriter.writeStringField("configurationType", + this.configurationType == null ? null : this.configurationType.toString()); + jsonWriter.writeJsonField("targetSparkConfiguration", this.targetSparkConfiguration); + jsonWriter.writeMapField("sparkConfig", this.sparkConfig, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseNotebookActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SynapseNotebookActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseNotebookActivityTypeProperties. + */ + public static SynapseNotebookActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseNotebookActivityTypeProperties deserializedSynapseNotebookActivityTypeProperties + = new SynapseNotebookActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("notebook".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.notebook + = SynapseNotebookReference.fromJson(reader); + } else if ("sparkPool".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.sparkPool + = BigDataPoolParametrizationReference.fromJson(reader); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> NotebookParameter.fromJson(reader1)); + deserializedSynapseNotebookActivityTypeProperties.parameters = parameters; + } else if ("executorSize".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.executorSize = reader.readUntyped(); + } else if ("conf".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.conf = reader.readUntyped(); + } else if ("driverSize".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.driverSize = reader.readUntyped(); + } else if ("numExecutors".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.numExecutors = reader.readUntyped(); + } else if ("configurationType".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.configurationType + = ConfigurationType.fromString(reader.getString()); + } else if ("targetSparkConfiguration".equals(fieldName)) { + deserializedSynapseNotebookActivityTypeProperties.targetSparkConfiguration + = 
SparkConfigurationParametrizationReference.fromJson(reader); + } else if ("sparkConfig".equals(fieldName)) { + Map sparkConfig = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSynapseNotebookActivityTypeProperties.sparkConfig = sparkConfig; + } else { + reader.skipChildren(); + } + } + + return deserializedSynapseNotebookActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java index 896e8471e80c..aee8169f53cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java @@ -6,12 +6,15 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; import com.azure.resourcemanager.datafactory.models.ConfigurationType; import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference; import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -19,24 +22,22 @@ * Execute spark job activity properties. 
*/ @Fluent -public final class SynapseSparkJobActivityTypeProperties { +public final class SynapseSparkJobActivityTypeProperties + implements JsonSerializable { /* * Synapse spark job reference. */ - @JsonProperty(value = "sparkJob", required = true) private SynapseSparkJobReference sparkJob; /* * User specified arguments to SynapseSparkJobDefinitionActivity. */ - @JsonProperty(value = "args") private List arguments; /* * The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "file") private Object file; /* @@ -44,42 +45,36 @@ public final class SynapseSparkJobActivityTypeProperties { * files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case * sensitive. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "scanFolder") private Object scanFolder; /* * The fully-qualified identifier or the main class that is in the main definition file, which will override the * 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "className") private Object className; /* * (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main * definition file, which will override the 'files' of the spark job definition you provide. */ - @JsonProperty(value = "files") private List files; /* * Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of * the spark job definition you provide. */ - @JsonProperty(value = "pythonCodeReference") private List pythonCodeReference; /* * Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of * the spark job definition you provide. 
*/ - @JsonProperty(value = "filesV2") private List filesV2; /* * The name of the big data pool which will be used to execute the spark batch job, which will override the * 'targetBigDataPool' of the spark job definition you provide. */ - @JsonProperty(value = "targetBigDataPool") private BigDataPoolParametrizationReference targetBigDataPool; /* @@ -87,13 +82,11 @@ public final class SynapseSparkJobActivityTypeProperties { * be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "executorSize") private Object executorSize; /* * Spark configuration properties, which will override the 'conf' of the spark job definition you provide. */ - @JsonProperty(value = "conf") private Object conf; /* @@ -101,33 +94,27 @@ public final class SynapseSparkJobActivityTypeProperties { * used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "driverSize") private Object driverSize; /* * Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition * you provide. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "numExecutors") private Object numExecutors; /* * The type of the spark config. */ - @JsonProperty(value = "configurationType") private ConfigurationType configurationType; /* * The spark configuration of the spark job. */ - @JsonProperty(value = "targetSparkConfiguration") private SparkConfigurationParametrizationReference targetSparkConfiguration; /* * Spark configuration property. 
*/ - @JsonProperty(value = "sparkConfig") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map sparkConfig; /** @@ -510,4 +497,98 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("sparkJob", this.sparkJob); + jsonWriter.writeArrayField("args", this.arguments, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("file", this.file); + jsonWriter.writeUntypedField("scanFolder", this.scanFolder); + jsonWriter.writeUntypedField("className", this.className); + jsonWriter.writeArrayField("files", this.files, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("pythonCodeReference", this.pythonCodeReference, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("filesV2", this.filesV2, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("targetBigDataPool", this.targetBigDataPool); + jsonWriter.writeUntypedField("executorSize", this.executorSize); + jsonWriter.writeUntypedField("conf", this.conf); + jsonWriter.writeUntypedField("driverSize", this.driverSize); + jsonWriter.writeUntypedField("numExecutors", this.numExecutors); + jsonWriter.writeStringField("configurationType", + this.configurationType == null ? null : this.configurationType.toString()); + jsonWriter.writeJsonField("targetSparkConfiguration", this.targetSparkConfiguration); + jsonWriter.writeMapField("sparkConfig", this.sparkConfig, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseSparkJobActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SynapseSparkJobActivityTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseSparkJobActivityTypeProperties. + */ + public static SynapseSparkJobActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseSparkJobActivityTypeProperties deserializedSynapseSparkJobActivityTypeProperties + = new SynapseSparkJobActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sparkJob".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.sparkJob + = SynapseSparkJobReference.fromJson(reader); + } else if ("args".equals(fieldName)) { + List arguments = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSynapseSparkJobActivityTypeProperties.arguments = arguments; + } else if ("file".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.file = reader.readUntyped(); + } else if ("scanFolder".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.scanFolder = reader.readUntyped(); + } else if ("className".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.className = reader.readUntyped(); + } else if ("files".equals(fieldName)) { + List files = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSynapseSparkJobActivityTypeProperties.files = files; + } else if ("pythonCodeReference".equals(fieldName)) { + List pythonCodeReference = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSynapseSparkJobActivityTypeProperties.pythonCodeReference = pythonCodeReference; + } else if ("filesV2".equals(fieldName)) { + List filesV2 = reader.readArray(reader1 
-> reader1.readUntyped()); + deserializedSynapseSparkJobActivityTypeProperties.filesV2 = filesV2; + } else if ("targetBigDataPool".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.targetBigDataPool + = BigDataPoolParametrizationReference.fromJson(reader); + } else if ("executorSize".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.executorSize = reader.readUntyped(); + } else if ("conf".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.conf = reader.readUntyped(); + } else if ("driverSize".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.driverSize = reader.readUntyped(); + } else if ("numExecutors".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.numExecutors = reader.readUntyped(); + } else if ("configurationType".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.configurationType + = ConfigurationType.fromString(reader.getString()); + } else if ("targetSparkConfiguration".equals(fieldName)) { + deserializedSynapseSparkJobActivityTypeProperties.targetSparkConfiguration + = SparkConfigurationParametrizationReference.fromJson(reader); + } else if ("sparkConfig".equals(fieldName)) { + Map sparkConfig = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSynapseSparkJobActivityTypeProperties.sparkConfig = sparkConfig; + } else { + reader.skipChildren(); + } + } + + return deserializedSynapseSparkJobActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java index 6510b27f59de..b31df3093bca 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeamDeskLinkedServiceTypeProperties.java @@ -6,50 +6,49 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.TeamDeskAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * TeamDesk linked service type properties. */ @Fluent -public final class TeamDeskLinkedServiceTypeProperties { +public final class TeamDeskLinkedServiceTypeProperties + implements JsonSerializable { /* * The authentication type to use. */ - @JsonProperty(value = "authenticationType", required = true) private TeamDeskAuthenticationType authenticationType; /* * The url to connect TeamDesk source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The username of the TeamDesk source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The password of the TeamDesk source. */ - @JsonProperty(value = "password") private SecretBase password; /* * The api token for the TeamDesk source. */ - @JsonProperty(value = "apiToken") private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -207,4 +206,59 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TeamDeskLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeamDeskLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeamDeskLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TeamDeskLinkedServiceTypeProperties. 
+ */ + public static TeamDeskLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeamDeskLinkedServiceTypeProperties deserializedTeamDeskLinkedServiceTypeProperties + = new TeamDeskLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("authenticationType".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.authenticationType + = TeamDeskAuthenticationType.fromString(reader.getString()); + } else if ("url".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("apiToken".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedTeamDeskLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTeamDeskLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java index e7ac9dcf671c..60a6b021041c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataLinkedServiceTypeProperties.java @@ -5,50 +5,49 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import com.azure.resourcemanager.datafactory.models.TeradataAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Teradata linked service properties. */ @Fluent -public final class TeradataLinkedServiceTypeProperties { +public final class TeradataLinkedServiceTypeProperties + implements JsonSerializable { /* * Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * Server name for connection. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * AuthenticationType to be used for connection. */ - @JsonProperty(value = "authenticationType") private TeradataAuthenticationType authenticationType; /* * Username for authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for authentication. */ - @JsonProperty(value = "password") private SecretBase password; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. 
*/ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -191,4 +190,58 @@ public void validate() { password().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the TeradataLinkedServiceTypeProperties. 
+ */ + public static TeradataLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataLinkedServiceTypeProperties deserializedTeradataLinkedServiceTypeProperties + = new TeradataLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("server".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.authenticationType + = TeradataAuthenticationType.fromString(reader.getString()); + } else if ("username".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedTeradataLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTeradataLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataTableDatasetTypeProperties.java index 413471f30085..1e8238b794b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataTableDatasetTypeProperties.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TeradataTableDatasetTypeProperties.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Teradata dataset properties. */ @Fluent -public final class TeradataTableDatasetTypeProperties { +public final class TeradataTableDatasetTypeProperties implements JsonSerializable { /* * The database name of Teradata. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database") private Object database; /* * The table name of Teradata. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /** @@ -77,4 +79,44 @@ public TeradataTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the TeradataTableDatasetTypeProperties. 
+ */ + public static TeradataTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataTableDatasetTypeProperties deserializedTeradataTableDatasetTypeProperties + = new TeradataTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("database".equals(fieldName)) { + deserializedTeradataTableDatasetTypeProperties.database = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedTeradataTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedTeradataTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java index 06711d6c31f1..27ffa1043d4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerQueryResponseInner.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A query of triggers. */ @Fluent -public final class TriggerQueryResponseInner { +public final class TriggerQueryResponseInner implements JsonSerializable { /* * List of triggers. 
*/ - @JsonProperty(value = "value", required = true) private List value; /* * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ - @JsonProperty(value = "continuationToken") private String continuationToken; /** @@ -90,4 +92,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerQueryResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerQueryResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerQueryResponseInner if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerQueryResponseInner. 
+ */ + public static TriggerQueryResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerQueryResponseInner deserializedTriggerQueryResponseInner = new TriggerQueryResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> TriggerResourceInner.fromJson(reader1)); + deserializedTriggerQueryResponseInner.value = value; + } else if ("continuationToken".equals(fieldName)) { + deserializedTriggerQueryResponseInner.continuationToken = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerQueryResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java index c577ba00b1f8..5b38c84858c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerResourceInner.java @@ -7,8 +7,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Trigger; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Trigger resource type. @@ -18,25 +21,21 @@ public final class TriggerResourceInner extends SubResource { /* * Properties of the trigger. 
*/ - @JsonProperty(value = "properties", required = true) private Trigger properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -117,4 +116,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerResourceInner. 
+ */ + public static TriggerResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerResourceInner deserializedTriggerResourceInner = new TriggerResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedTriggerResourceInner.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedTriggerResourceInner.properties = Trigger.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedTriggerResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedTriggerResourceInner.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedTriggerResourceInner.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerResourceInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java index 98f4fea6d67f..672f71b630ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerRunsQueryResponseInner.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.TriggerRun; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of trigger runs. */ @Fluent -public final class TriggerRunsQueryResponseInner { +public final class TriggerRunsQueryResponseInner implements JsonSerializable { /* * List of trigger runs. */ - @JsonProperty(value = "value", required = true) private List value; /* * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ - @JsonProperty(value = "continuationToken") private String continuationToken; /** @@ -91,4 +93,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerRunsQueryResponseInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerRunsQueryResponseInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerRunsQueryResponseInner if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerRunsQueryResponseInner. 
+ */ + public static TriggerRunsQueryResponseInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerRunsQueryResponseInner deserializedTriggerRunsQueryResponseInner + = new TriggerRunsQueryResponseInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> TriggerRun.fromJson(reader1)); + deserializedTriggerRunsQueryResponseInner.value = value; + } else if ("continuationToken".equals(fieldName)) { + deserializedTriggerRunsQueryResponseInner.continuationToken = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerRunsQueryResponseInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerSubscriptionOperationStatusInner.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerSubscriptionOperationStatusInner.java index 4806cb80c110..b7bf703e862a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerSubscriptionOperationStatusInner.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TriggerSubscriptionOperationStatusInner.java @@ -5,24 +5,27 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Immutable; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.EventSubscriptionStatus; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Defines the response of a trigger subscription 
operation. */ @Immutable -public final class TriggerSubscriptionOperationStatusInner { +public final class TriggerSubscriptionOperationStatusInner + implements JsonSerializable { /* * Trigger name. */ - @JsonProperty(value = "triggerName", access = JsonProperty.Access.WRITE_ONLY) private String triggerName; /* * Event Subscription Status. */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private EventSubscriptionStatus status; /** @@ -56,4 +59,43 @@ public EventSubscriptionStatus status() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerSubscriptionOperationStatusInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerSubscriptionOperationStatusInner if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the TriggerSubscriptionOperationStatusInner. 
+ */ + public static TriggerSubscriptionOperationStatusInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerSubscriptionOperationStatusInner deserializedTriggerSubscriptionOperationStatusInner + = new TriggerSubscriptionOperationStatusInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("triggerName".equals(fieldName)) { + deserializedTriggerSubscriptionOperationStatusInner.triggerName = reader.getString(); + } else if ("status".equals(fieldName)) { + deserializedTriggerSubscriptionOperationStatusInner.status + = EventSubscriptionStatus.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerSubscriptionOperationStatusInner; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java index 7e4b996c1006..6db347a95c8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TumblingWindowTriggerTypeProperties.java @@ -5,43 +5,46 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DependencyReference; import 
com.azure.resourcemanager.datafactory.models.RetryPolicy; import com.azure.resourcemanager.datafactory.models.TumblingWindowFrequency; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; import java.util.List; /** * Tumbling Window Trigger properties. */ @Fluent -public final class TumblingWindowTriggerTypeProperties { +public final class TumblingWindowTriggerTypeProperties + implements JsonSerializable { /* * The frequency of the time windows. */ - @JsonProperty(value = "frequency", required = true) private TumblingWindowFrequency frequency; /* * The interval of the time windows. The minimum interval allowed is 15 Minutes. */ - @JsonProperty(value = "interval", required = true) private int interval; /* * The start time for the time period for the trigger during which events are fired for windows that are ready. Only * UTC time is currently supported. */ - @JsonProperty(value = "startTime", required = true) private OffsetDateTime startTime; /* * The end time for the time period for the trigger during which events are fired for windows that are ready. Only * UTC time is currently supported. */ - @JsonProperty(value = "endTime") private OffsetDateTime endTime; /* @@ -49,25 +52,21 @@ public final class TumblingWindowTriggerTypeProperties { * end time. The default is 0. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "delay") private Object delay; /* * The max number of parallel time windows (ready for execution) for which a new run is triggered. */ - @JsonProperty(value = "maxConcurrency", required = true) private int maxConcurrency; /* * Retry policy that will be applied for failed pipeline runs. */ - @JsonProperty(value = "retryPolicy") private RetryPolicy retryPolicy; /* * Triggers that this trigger depends on. Only tumbling window triggers are supported. 
*/ - @JsonProperty(value = "dependsOn") private List dependsOn; /** @@ -271,4 +270,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TumblingWindowTriggerTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("frequency", this.frequency == null ? null : this.frequency.toString()); + jsonWriter.writeIntField("interval", this.interval); + jsonWriter.writeStringField("startTime", + this.startTime == null ? null : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.startTime)); + jsonWriter.writeIntField("maxConcurrency", this.maxConcurrency); + jsonWriter.writeStringField("endTime", + this.endTime == null ? null : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.endTime)); + jsonWriter.writeUntypedField("delay", this.delay); + jsonWriter.writeJsonField("retryPolicy", this.retryPolicy); + jsonWriter.writeArrayField("dependsOn", this.dependsOn, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TumblingWindowTriggerTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TumblingWindowTriggerTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TumblingWindowTriggerTypeProperties. 
+ */ + public static TumblingWindowTriggerTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TumblingWindowTriggerTypeProperties deserializedTumblingWindowTriggerTypeProperties + = new TumblingWindowTriggerTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("frequency".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.frequency + = TumblingWindowFrequency.fromString(reader.getString()); + } else if ("interval".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.interval = reader.getInt(); + } else if ("startTime".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.startTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("maxConcurrency".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.maxConcurrency = reader.getInt(); + } else if ("endTime".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.endTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("delay".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.delay = reader.readUntyped(); + } else if ("retryPolicy".equals(fieldName)) { + deserializedTumblingWindowTriggerTypeProperties.retryPolicy = RetryPolicy.fromJson(reader); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> DependencyReference.fromJson(reader1)); + deserializedTumblingWindowTriggerTypeProperties.dependsOn = dependsOn; + } else { + reader.skipChildren(); + } + } + + return deserializedTumblingWindowTriggerTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TwilioLinkedServiceTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TwilioLinkedServiceTypeProperties.java index 16f5675a38e6..ffbcb224d7b7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TwilioLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/TwilioLinkedServiceTypeProperties.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Twilio linked service type properties. */ @Fluent -public final class TwilioLinkedServiceTypeProperties { +public final class TwilioLinkedServiceTypeProperties implements JsonSerializable { /* * The Account SID of Twilio service. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName", required = true) private Object username; /* * The auth token of Twilio service. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /** @@ -95,4 +97,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TwilioLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TwilioLinkedServiceTypeProperties from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of TwilioLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TwilioLinkedServiceTypeProperties. + */ + public static TwilioLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TwilioLinkedServiceTypeProperties deserializedTwilioLinkedServiceTypeProperties + = new TwilioLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("userName".equals(fieldName)) { + deserializedTwilioLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedTwilioLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedTwilioLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/UntilActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/UntilActivityTypeProperties.java index cdfd7e9d5e71..11a0f362ad52 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/UntilActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/UntilActivityTypeProperties.java @@ -6,20 +6,23 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.Activity; import com.azure.resourcemanager.datafactory.models.Expression; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * Until activity properties. */ @Fluent -public final class UntilActivityTypeProperties { +public final class UntilActivityTypeProperties implements JsonSerializable { /* * An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true */ - @JsonProperty(value = "expression", required = true) private Expression expression; /* @@ -27,13 +30,11 @@ public final class UntilActivityTypeProperties { * TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "timeout") private Object timeout; /* * List of activities to execute. */ - @JsonProperty(value = "activities", required = true) private List activities; /** @@ -131,4 +132,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(UntilActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("expression", this.expression); + jsonWriter.writeArrayField("activities", this.activities, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeUntypedField("timeout", this.timeout); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UntilActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UntilActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the UntilActivityTypeProperties. + */ + public static UntilActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UntilActivityTypeProperties deserializedUntilActivityTypeProperties = new UntilActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("expression".equals(fieldName)) { + deserializedUntilActivityTypeProperties.expression = Expression.fromJson(reader); + } else if ("activities".equals(fieldName)) { + List activities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedUntilActivityTypeProperties.activities = activities; + } else if ("timeout".equals(fieldName)) { + deserializedUntilActivityTypeProperties.timeout = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedUntilActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java index aacbc0ff10f6..71d49021d87d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ValidationActivityTypeProperties.java @@ -6,47 +6,46 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Validation activity properties. */ @Fluent -public final class ValidationActivityTypeProperties { +public final class ValidationActivityTypeProperties implements JsonSerializable { /* * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of * TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "timeout") private Object timeout; /* * A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. * Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "sleep") private Object sleep; /* * Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. * Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "minimumSize") private Object minimumSize; /* * Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to * false, the folder must be empty. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "childItems") private Object childItems; /* * Validation activity dataset reference. 
*/ - @JsonProperty(value = "dataset", required = true) private DatasetReference dataset; /** @@ -181,4 +180,54 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ValidationActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("dataset", this.dataset); + jsonWriter.writeUntypedField("timeout", this.timeout); + jsonWriter.writeUntypedField("sleep", this.sleep); + jsonWriter.writeUntypedField("minimumSize", this.minimumSize); + jsonWriter.writeUntypedField("childItems", this.childItems); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ValidationActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ValidationActivityTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ValidationActivityTypeProperties. 
+ */ + public static ValidationActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ValidationActivityTypeProperties deserializedValidationActivityTypeProperties + = new ValidationActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataset".equals(fieldName)) { + deserializedValidationActivityTypeProperties.dataset = DatasetReference.fromJson(reader); + } else if ("timeout".equals(fieldName)) { + deserializedValidationActivityTypeProperties.timeout = reader.readUntyped(); + } else if ("sleep".equals(fieldName)) { + deserializedValidationActivityTypeProperties.sleep = reader.readUntyped(); + } else if ("minimumSize".equals(fieldName)) { + deserializedValidationActivityTypeProperties.minimumSize = reader.readUntyped(); + } else if ("childItems".equals(fieldName)) { + deserializedValidationActivityTypeProperties.childItems = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedValidationActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaDatasetTypeProperties.java index 9785ada47798..98ccae80c3f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaDatasetTypeProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Vertica Properties. */ @Fluent -public final class VerticaDatasetTypeProperties { +public final class VerticaDatasetTypeProperties implements JsonSerializable { /* * This property will be retired. Please consider using schema + table properties instead. */ - @JsonProperty(value = "tableName") private Object tableName; /* * The table name of the Vertica. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "table") private Object table; /* * The schema name of the Vertica. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /** @@ -105,4 +106,46 @@ public VerticaDatasetTypeProperties withSchema(Object schema) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("tableName", this.tableName); + jsonWriter.writeUntypedField("table", this.table); + jsonWriter.writeUntypedField("schema", this.schema); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VerticaDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VerticaDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the VerticaDatasetTypeProperties. 
+ */ + public static VerticaDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VerticaDatasetTypeProperties deserializedVerticaDatasetTypeProperties = new VerticaDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tableName".equals(fieldName)) { + deserializedVerticaDatasetTypeProperties.tableName = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedVerticaDatasetTypeProperties.table = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedVerticaDatasetTypeProperties.schema = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedVerticaDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java index 25ffef5d2b69..825a27d6b34b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/VerticaLinkedServiceTypeProperties.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Vertica linked service properties. 
*/ @Fluent -public final class VerticaLinkedServiceTypeProperties { +public final class VerticaLinkedServiceTypeProperties implements JsonSerializable { /* * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - @JsonProperty(value = "connectionString") private Object connectionString; /* * The Azure key vault secret reference of password in connection string. */ - @JsonProperty(value = "pwd") private AzureKeyVaultSecretReference pwd; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -112,4 +113,47 @@ public void validate() { pwd().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionString", this.connectionString); + jsonWriter.writeJsonField("pwd", this.pwd); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VerticaLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VerticaLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the VerticaLinkedServiceTypeProperties. 
+ */ + public static VerticaLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VerticaLinkedServiceTypeProperties deserializedVerticaLinkedServiceTypeProperties + = new VerticaLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionString".equals(fieldName)) { + deserializedVerticaLinkedServiceTypeProperties.connectionString = reader.readUntyped(); + } else if ("pwd".equals(fieldName)) { + deserializedVerticaLinkedServiceTypeProperties.pwd = AzureKeyVaultSecretReference.fromJson(reader); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedVerticaLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedVerticaLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java index 68f2868f18ed..c204f512be0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WaitActivityTypeProperties.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Wait activity properties. 
*/ @Fluent -public final class WaitActivityTypeProperties { +public final class WaitActivityTypeProperties implements JsonSerializable { /* * Duration in seconds. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "waitTimeInSeconds", required = true) private Object waitTimeInSeconds; /** @@ -59,4 +62,41 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WaitActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("waitTimeInSeconds", this.waitTimeInSeconds); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WaitActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WaitActivityTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WaitActivityTypeProperties. 
+ */ + public static WaitActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WaitActivityTypeProperties deserializedWaitActivityTypeProperties = new WaitActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("waitTimeInSeconds".equals(fieldName)) { + deserializedWaitActivityTypeProperties.waitTimeInSeconds = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedWaitActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java index dda0be066974..2ad51c4fc746 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseLinkedServiceTypeProperties.java @@ -6,64 +6,61 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Microsoft Fabric Warehouse linked service properties. */ @Fluent -public final class WarehouseLinkedServiceTypeProperties { +public final class WarehouseLinkedServiceTypeProperties + implements JsonSerializable { /* * The ID of Microsoft Fabric Warehouse artifact. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "artifactId", required = true) private Object artifactId; /* * The endpoint of Microsoft Fabric Warehouse server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "endpoint", required = true) private Object endpoint; /* * The ID of Microsoft Fabric workspace. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "workspaceId") private Object workspaceId; /* * The ID of the application used to authenticate against Microsoft Fabric Warehouse. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The Key of the application used to authenticate against Microsoft Fabric Warehouse. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tenant") private Object tenant; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /* * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for * key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalCredentialType") private Object servicePrincipalCredentialType; /* @@ -72,7 +69,6 @@ public final class WarehouseLinkedServiceTypeProperties { * servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be * AzureKeyVaultSecretReference. 
*/ - @JsonProperty(value = "servicePrincipalCredential") private SecretBase servicePrincipalCredential; /** @@ -311,4 +307,68 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WarehouseLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("artifactId", this.artifactId); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeUntypedField("workspaceId", this.workspaceId); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeUntypedField("tenant", this.tenant); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + jsonWriter.writeUntypedField("servicePrincipalCredentialType", this.servicePrincipalCredentialType); + jsonWriter.writeJsonField("servicePrincipalCredential", this.servicePrincipalCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WarehouseLinkedServiceTypeProperties. 
+ */ + public static WarehouseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseLinkedServiceTypeProperties deserializedWarehouseLinkedServiceTypeProperties + = new WarehouseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("artifactId".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.artifactId = reader.readUntyped(); + } else if ("endpoint".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("workspaceId".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.workspaceId = reader.readUntyped(); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("tenant".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.tenant = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else if ("servicePrincipalCredentialType".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.servicePrincipalCredentialType + = reader.readUntyped(); + } else if ("servicePrincipalCredential".equals(fieldName)) { + deserializedWarehouseLinkedServiceTypeProperties.servicePrincipalCredential + = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedWarehouseLinkedServiceTypeProperties; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseTableDatasetTypeProperties.java index 3a43441680fe..edf3f288af94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WarehouseTableDatasetTypeProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Microsoft Fabric Warehouse dataset properties. */ @Fluent -public final class WarehouseTableDatasetTypeProperties { +public final class WarehouseTableDatasetTypeProperties + implements JsonSerializable { /* * The schema name of the Microsoft Fabric Warehouse. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "schema") private Object schema; /* * The table name of the Microsoft Fabric Warehouse. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "table") private Object table; /** @@ -81,4 +84,44 @@ public WarehouseTableDatasetTypeProperties withTable(Object table) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeUntypedField("table", this.table); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the WarehouseTableDatasetTypeProperties. + */ + public static WarehouseTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseTableDatasetTypeProperties deserializedWarehouseTableDatasetTypeProperties + = new WarehouseTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("schema".equals(fieldName)) { + deserializedWarehouseTableDatasetTypeProperties.schema = reader.readUntyped(); + } else if ("table".equals(fieldName)) { + deserializedWarehouseTableDatasetTypeProperties.table = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedWarehouseTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java index 843aad8822c6..08a3be1ad6b1 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebActivityTypeProperties.java @@ -6,13 +6,16 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetReference; import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; import com.azure.resourcemanager.datafactory.models.WebActivityAuthentication; import com.azure.resourcemanager.datafactory.models.WebActivityMethod; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -20,17 +23,15 @@ * Web activity type properties. */ @Fluent -public final class WebActivityTypeProperties { +public final class WebActivityTypeProperties implements JsonSerializable { /* * Rest API method for target endpoint. */ - @JsonProperty(value = "method", required = true) private WebActivityMethod method; /* * Web activity target endpoint and path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* @@ -38,27 +39,22 @@ public final class WebActivityTypeProperties { * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "headers") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map headers; /* * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET * method Type: string (or Expression with resultType string). */ - @JsonProperty(value = "body") private Object body; /* * Authentication method used for calling the endpoint. */ - @JsonProperty(value = "authentication") private WebActivityAuthentication authentication; /* * When set to true, Certificate validation will be disabled. */ - @JsonProperty(value = "disableCertValidation") private Boolean disableCertValidation; /* @@ -66,7 +62,6 @@ public final class WebActivityTypeProperties { * get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 * minutes */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* @@ -74,25 +69,21 @@ public final class WebActivityTypeProperties { * stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET * call on location given in http response headers. */ - @JsonProperty(value = "turnOffAsync") private Boolean turnOffAsync; /* * List of datasets passed to web endpoint. */ - @JsonProperty(value = "datasets") private List datasets; /* * List of linked services passed to web endpoint. */ - @JsonProperty(value = "linkedServices") private List linkedServices; /* * The integration runtime reference. */ - @JsonProperty(value = "connectVia") private IntegrationRuntimeReference connectVia; /** @@ -365,4 +356,77 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("method", this.method == null ? 
null : this.method.toString()); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeMapField("headers", this.headers, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("body", this.body); + jsonWriter.writeJsonField("authentication", this.authentication); + jsonWriter.writeBooleanField("disableCertValidation", this.disableCertValidation); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + jsonWriter.writeBooleanField("turnOffAsync", this.turnOffAsync); + jsonWriter.writeArrayField("datasets", this.datasets, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("linkedServices", this.linkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("connectVia", this.connectVia); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebActivityTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebActivityTypeProperties. 
+ */ + public static WebActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebActivityTypeProperties deserializedWebActivityTypeProperties = new WebActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("method".equals(fieldName)) { + deserializedWebActivityTypeProperties.method = WebActivityMethod.fromString(reader.getString()); + } else if ("url".equals(fieldName)) { + deserializedWebActivityTypeProperties.url = reader.readUntyped(); + } else if ("headers".equals(fieldName)) { + Map headers = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedWebActivityTypeProperties.headers = headers; + } else if ("body".equals(fieldName)) { + deserializedWebActivityTypeProperties.body = reader.readUntyped(); + } else if ("authentication".equals(fieldName)) { + deserializedWebActivityTypeProperties.authentication = WebActivityAuthentication.fromJson(reader); + } else if ("disableCertValidation".equals(fieldName)) { + deserializedWebActivityTypeProperties.disableCertValidation + = reader.getNullable(JsonReader::getBoolean); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedWebActivityTypeProperties.httpRequestTimeout = reader.readUntyped(); + } else if ("turnOffAsync".equals(fieldName)) { + deserializedWebActivityTypeProperties.turnOffAsync = reader.getNullable(JsonReader::getBoolean); + } else if ("datasets".equals(fieldName)) { + List datasets = reader.readArray(reader1 -> DatasetReference.fromJson(reader1)); + deserializedWebActivityTypeProperties.datasets = datasets; + } else if ("linkedServices".equals(fieldName)) { + List linkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedWebActivityTypeProperties.linkedServices = linkedServices; + } else if ("connectVia".equals(fieldName)) { + 
deserializedWebActivityTypeProperties.connectVia = IntegrationRuntimeReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedWebActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java index 7032993f054f..e923944703f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebTableDatasetTypeProperties.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Web table dataset properties. */ @Fluent -public final class WebTableDatasetTypeProperties { +public final class WebTableDatasetTypeProperties implements JsonSerializable { /* * The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), * minimum: 0. */ - @JsonProperty(value = "index", required = true) private Object index; /* * The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType * string). 
*/ - @JsonProperty(value = "path") private Object path; /** @@ -91,4 +93,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebTableDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("index", this.index); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebTableDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebTableDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebTableDatasetTypeProperties. + */ + public static WebTableDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebTableDatasetTypeProperties deserializedWebTableDatasetTypeProperties + = new WebTableDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("index".equals(fieldName)) { + deserializedWebTableDatasetTypeProperties.index = reader.readUntyped(); + } else if ("path".equals(fieldName)) { + deserializedWebTableDatasetTypeProperties.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedWebTableDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java index 88d029ae13e1..c39bb7a716f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/WebhookActivityTypeProperties.java @@ -6,34 +6,34 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.WebActivityAuthentication; import com.azure.resourcemanager.datafactory.models.WebhookActivityMethod; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * WebHook activity type properties. */ @Fluent -public final class WebhookActivityTypeProperties { +public final class WebhookActivityTypeProperties implements JsonSerializable { /* * Rest API method for target endpoint. */ - @JsonProperty(value = "method", required = true) private WebhookActivityMethod method; /* * WebHook activity target endpoint and path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 * minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "timeout") private String timeout; /* @@ -41,21 +41,17 @@ public final class WebhookActivityTypeProperties { * "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. 
Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "headers") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map headers; /* * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET * method Type: string (or Expression with resultType string). */ - @JsonProperty(value = "body") private Object body; /* * Authentication method used for calling the endpoint. */ - @JsonProperty(value = "authentication") private WebActivityAuthentication authentication; /* @@ -63,7 +59,6 @@ public final class WebhookActivityTypeProperties { * activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "reportStatusOnCallBack") private Object reportStatusOnCallBack; /** @@ -250,4 +245,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebhookActivityTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("method", this.method == null ? null : this.method.toString()); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeStringField("timeout", this.timeout); + jsonWriter.writeMapField("headers", this.headers, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("body", this.body); + jsonWriter.writeJsonField("authentication", this.authentication); + jsonWriter.writeUntypedField("reportStatusOnCallBack", this.reportStatusOnCallBack); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebhookActivityTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of WebhookActivityTypeProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebhookActivityTypeProperties. + */ + public static WebhookActivityTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebhookActivityTypeProperties deserializedWebhookActivityTypeProperties + = new WebhookActivityTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("method".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.method + = WebhookActivityMethod.fromString(reader.getString()); + } else if ("url".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.url = reader.readUntyped(); + } else if ("timeout".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.timeout = reader.getString(); + } else if ("headers".equals(fieldName)) { + Map headers = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedWebhookActivityTypeProperties.headers = headers; + } else if ("body".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.body = reader.readUntyped(); + } else if ("authentication".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.authentication + = WebActivityAuthentication.fromJson(reader); + } else if ("reportStatusOnCallBack".equals(fieldName)) { + deserializedWebhookActivityTypeProperties.reportStatusOnCallBack = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedWebhookActivityTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java index 3a90a4da8637..4719a05a75db 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XeroLinkedServiceTypeProperties.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Xero Service linked service properties. */ @Fluent -public final class XeroLinkedServiceTypeProperties { +public final class XeroLinkedServiceTypeProperties implements JsonSerializable { /* * Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. * Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The endpoint of the Xero server. (i.e. api.xero.com) */ - @JsonProperty(value = "host") private Object host; /* * The consumer key associated with the Xero application. */ - @JsonProperty(value = "consumerKey") private SecretBase consumerKey; /* @@ -37,33 +38,28 @@ public final class XeroLinkedServiceTypeProperties { * text from the .pem file, including the Unix line endings( * ). */ - @JsonProperty(value = "privateKey") private SecretBase privateKey; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -259,4 +255,62 @@ public void validate() { privateKey().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("host", this.host); + jsonWriter.writeJsonField("consumerKey", this.consumerKey); + jsonWriter.writeJsonField("privateKey", this.privateKey); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XeroLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XeroLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the XeroLinkedServiceTypeProperties. + */ + public static XeroLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XeroLinkedServiceTypeProperties deserializedXeroLinkedServiceTypeProperties + = new XeroLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("host".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.host = reader.readUntyped(); + } else if ("consumerKey".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.consumerKey = SecretBase.fromJson(reader); + } else if ("privateKey".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.privateKey = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedXeroLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedXeroLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java index f8d97aac1b43..1daec0d1c4ae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/XmlDatasetTypeProperties.java @@ -6,19 +6,22 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.DatasetCompression; import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Xml dataset properties. */ @Fluent -public final class XmlDatasetTypeProperties { +public final class XmlDatasetTypeProperties implements JsonSerializable { /* * The location of the json data storage. */ - @JsonProperty(value = "location", required = true) private DatasetLocation location; /* @@ -27,19 +30,16 @@ public final class XmlDatasetTypeProperties { * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "encodingName") private Object encodingName; /* * The null value string. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "nullValue") private Object nullValue; /* * The data compression method used for the json dataset. 
*/ - @JsonProperty(value = "compression") private DatasetCompression compression; /** @@ -153,4 +153,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(XmlDatasetTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("location", this.location); + jsonWriter.writeUntypedField("encodingName", this.encodingName); + jsonWriter.writeUntypedField("nullValue", this.nullValue); + jsonWriter.writeJsonField("compression", this.compression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XmlDatasetTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XmlDatasetTypeProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the XmlDatasetTypeProperties. 
+ */ + public static XmlDatasetTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XmlDatasetTypeProperties deserializedXmlDatasetTypeProperties = new XmlDatasetTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedXmlDatasetTypeProperties.location = DatasetLocation.fromJson(reader); + } else if ("encodingName".equals(fieldName)) { + deserializedXmlDatasetTypeProperties.encodingName = reader.readUntyped(); + } else if ("nullValue".equals(fieldName)) { + deserializedXmlDatasetTypeProperties.nullValue = reader.readUntyped(); + } else if ("compression".equals(fieldName)) { + deserializedXmlDatasetTypeProperties.compression = DatasetCompression.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedXmlDatasetTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java index f03d696d621a..3d35e39e1ad4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZendeskLinkedServiceTypeProperties.java @@ -6,50 +6,48 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; import 
com.azure.resourcemanager.datafactory.models.ZendeskAuthenticationType; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Zendesk linked service type properties. */ @Fluent -public final class ZendeskLinkedServiceTypeProperties { +public final class ZendeskLinkedServiceTypeProperties implements JsonSerializable { /* * The authentication type to use. */ - @JsonProperty(value = "authenticationType", required = true) private ZendeskAuthenticationType authenticationType; /* * The url to connect Zendesk source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "url", required = true) private Object url; /* * The username of the Zendesk source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName") private Object username; /* * The password of the Zendesk source. */ - @JsonProperty(value = "password") private SecretBase password; /* * The api token for the Zendesk source. */ - @JsonProperty(value = "apiToken") private SecretBase apiToken; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -207,4 +205,59 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ZendeskLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? 
null : this.authenticationType.toString()); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeJsonField("apiToken", this.apiToken); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZendeskLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ZendeskLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ZendeskLinkedServiceTypeProperties. + */ + public static ZendeskLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZendeskLinkedServiceTypeProperties deserializedZendeskLinkedServiceTypeProperties + = new ZendeskLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("authenticationType".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.authenticationType + = ZendeskAuthenticationType.fromString(reader.getString()); + } else if ("url".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.password = SecretBase.fromJson(reader); + } else if ("apiToken".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.apiToken = SecretBase.fromJson(reader); + } else if 
("encryptedCredential".equals(fieldName)) { + deserializedZendeskLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedZendeskLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java index 02fb434ba14d..698302d0c149 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/ZohoLinkedServiceTypeProperties.java @@ -5,57 +5,54 @@ package com.azure.resourcemanager.datafactory.fluent.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.models.SecretBase; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * Zoho server linked service properties. */ @Fluent -public final class ZohoLinkedServiceTypeProperties { +public final class ZohoLinkedServiceTypeProperties implements JsonSerializable { /* * Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. * Type: object. */ - @JsonProperty(value = "connectionProperties") private Object connectionProperties; /* * The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) */ - @JsonProperty(value = "endpoint") private Object endpoint; /* * The access token for Zoho authentication. 
*/ - @JsonProperty(value = "accessToken") private SecretBase accessToken; /* * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - @JsonProperty(value = "useEncryptedEndpoints") private Object useEncryptedEndpoints; /* * Specifies whether to require the host name in the server's certificate to match the host name of the server when * connecting over SSL. The default value is true. */ - @JsonProperty(value = "useHostVerification") private Object useHostVerification; /* * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - @JsonProperty(value = "usePeerVerification") private Object usePeerVerification; /* * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime * credential manager. Type: string. */ - @JsonProperty(value = "encryptedCredential") private String encryptedCredential; /** @@ -224,4 +221,59 @@ public void validate() { accessToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("connectionProperties", this.connectionProperties); + jsonWriter.writeUntypedField("endpoint", this.endpoint); + jsonWriter.writeJsonField("accessToken", this.accessToken); + jsonWriter.writeUntypedField("useEncryptedEndpoints", this.useEncryptedEndpoints); + jsonWriter.writeUntypedField("useHostVerification", this.useHostVerification); + jsonWriter.writeUntypedField("usePeerVerification", this.usePeerVerification); + jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZohoLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ZohoLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ZohoLinkedServiceTypeProperties. + */ + public static ZohoLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZohoLinkedServiceTypeProperties deserializedZohoLinkedServiceTypeProperties + = new ZohoLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionProperties".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.connectionProperties = reader.readUntyped(); + } else if ("endpoint".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.endpoint = reader.readUntyped(); + } else if ("accessToken".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.accessToken = SecretBase.fromJson(reader); + } else if ("useEncryptedEndpoints".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.useEncryptedEndpoints = reader.readUntyped(); + } else if ("useHostVerification".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.useHostVerification = reader.readUntyped(); + } else if ("usePeerVerification".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.usePeerVerification = reader.readUntyped(); + } else if ("encryptedCredential".equals(fieldName)) { + deserializedZohoLinkedServiceTypeProperties.encryptedCredential = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedZohoLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java index 486ecccbcab7..10179409b557 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Activity.java @@ -6,14 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -21,62 +18,47 @@ /** * A pipeline activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Activity.class, visible = true) -@JsonTypeName("Activity") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Container", value = ControlActivity.class), - @JsonSubTypes.Type(name = "Execution", value = ExecutionActivity.class), - @JsonSubTypes.Type(name = "ExecuteWranglingDataflow", value = ExecuteWranglingDataflowActivity.class) }) @Fluent -public class Activity { +public class Activity implements JsonSerializable { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Activity"; /* * Activity name. 
*/ - @JsonProperty(value = "name", required = true) private String name; /* * Activity description. */ - @JsonProperty(value = "description") private String description; /* * Activity state. This is an optional property and if not provided, the state will be Active by default. */ - @JsonProperty(value = "state") private ActivityState state; /* * Status result of the activity when the state is set to Inactive. This is an optional property and if not provided * when the activity is inactive, the status will be Succeeded by default. */ - @JsonProperty(value = "onInactiveMarkAs") private ActivityOnInactiveMarkAs onInactiveMarkAs; /* * Activity depends on condition. */ - @JsonProperty(value = "dependsOn") private List dependsOn; /* * Activity user properties. */ - @JsonProperty(value = "userProperties") private List userProperties; /* * A pipeline activity. */ - @JsonIgnore private Map additionalProperties; /** @@ -223,7 +205,6 @@ public Activity withUserProperties(List userProperties) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -239,14 +220,6 @@ public Activity withAdditionalProperties(Map additionalPropertie return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -266,4 +239,180 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Activity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeStringField("state", this.state == null ? 
null : this.state.toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + this.onInactiveMarkAs == null ? null : this.onInactiveMarkAs.toString()); + jsonWriter.writeArrayField("dependsOn", this.dependsOn, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", this.userProperties, + (writer, element) -> writer.writeJson(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Activity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Activity if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Activity. + */ + public static Activity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("Container".equals(discriminatorValue)) { + return ControlActivity.fromJsonKnownDiscriminator(readerToUse.reset()); + } else if ("ExecutePipeline".equals(discriminatorValue)) { + return ExecutePipelineActivity.fromJson(readerToUse.reset()); + } else if ("IfCondition".equals(discriminatorValue)) { + return IfConditionActivity.fromJson(readerToUse.reset()); + } else if ("Switch".equals(discriminatorValue)) { + return SwitchActivity.fromJson(readerToUse.reset()); + } else if ("ForEach".equals(discriminatorValue)) { + return ForEachActivity.fromJson(readerToUse.reset()); + } else if ("Wait".equals(discriminatorValue)) { + return WaitActivity.fromJson(readerToUse.reset()); + } else if ("Fail".equals(discriminatorValue)) { + return FailActivity.fromJson(readerToUse.reset()); + } else if ("Until".equals(discriminatorValue)) { + return UntilActivity.fromJson(readerToUse.reset()); + } else if ("Validation".equals(discriminatorValue)) { + return ValidationActivity.fromJson(readerToUse.reset()); + } else if ("Filter".equals(discriminatorValue)) { + return FilterActivity.fromJson(readerToUse.reset()); + } else if ("SetVariable".equals(discriminatorValue)) { + return SetVariableActivity.fromJson(readerToUse.reset()); + } else if ("AppendVariable".equals(discriminatorValue)) { + return AppendVariableActivity.fromJson(readerToUse.reset()); + } else if ("WebHook".equals(discriminatorValue)) { + return WebhookActivity.fromJson(readerToUse.reset()); + } else if ("Execution".equals(discriminatorValue)) { + return ExecutionActivity.fromJsonKnownDiscriminator(readerToUse.reset()); + } else if ("Copy".equals(discriminatorValue)) { + return CopyActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightHive".equals(discriminatorValue)) { + return HDInsightHiveActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightPig".equals(discriminatorValue)) { + return HDInsightPigActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightMapReduce".equals(discriminatorValue)) { 
+ return HDInsightMapReduceActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightStreaming".equals(discriminatorValue)) { + return HDInsightStreamingActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightSpark".equals(discriminatorValue)) { + return HDInsightSparkActivity.fromJson(readerToUse.reset()); + } else if ("ExecuteSSISPackage".equals(discriminatorValue)) { + return ExecuteSsisPackageActivity.fromJson(readerToUse.reset()); + } else if ("Custom".equals(discriminatorValue)) { + return CustomActivity.fromJson(readerToUse.reset()); + } else if ("SqlServerStoredProcedure".equals(discriminatorValue)) { + return SqlServerStoredProcedureActivity.fromJson(readerToUse.reset()); + } else if ("Delete".equals(discriminatorValue)) { + return DeleteActivity.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorerCommand".equals(discriminatorValue)) { + return AzureDataExplorerCommandActivity.fromJson(readerToUse.reset()); + } else if ("Lookup".equals(discriminatorValue)) { + return LookupActivity.fromJson(readerToUse.reset()); + } else if ("WebActivity".equals(discriminatorValue)) { + return WebActivity.fromJson(readerToUse.reset()); + } else if ("GetMetadata".equals(discriminatorValue)) { + return GetMetadataActivity.fromJson(readerToUse.reset()); + } else if ("AzureMLBatchExecution".equals(discriminatorValue)) { + return AzureMLBatchExecutionActivity.fromJson(readerToUse.reset()); + } else if ("AzureMLUpdateResource".equals(discriminatorValue)) { + return AzureMLUpdateResourceActivity.fromJson(readerToUse.reset()); + } else if ("AzureMLExecutePipeline".equals(discriminatorValue)) { + return AzureMLExecutePipelineActivity.fromJson(readerToUse.reset()); + } else if ("DataLakeAnalyticsU-SQL".equals(discriminatorValue)) { + return DataLakeAnalyticsUsqlActivity.fromJson(readerToUse.reset()); + } else if ("DatabricksNotebook".equals(discriminatorValue)) { + return DatabricksNotebookActivity.fromJson(readerToUse.reset()); + } else if 
("DatabricksSparkJar".equals(discriminatorValue)) { + return DatabricksSparkJarActivity.fromJson(readerToUse.reset()); + } else if ("DatabricksSparkPython".equals(discriminatorValue)) { + return DatabricksSparkPythonActivity.fromJson(readerToUse.reset()); + } else if ("AzureFunctionActivity".equals(discriminatorValue)) { + return AzureFunctionActivity.fromJson(readerToUse.reset()); + } else if ("ExecuteDataFlow".equals(discriminatorValue)) { + return ExecuteDataFlowActivity.fromJson(readerToUse.reset()); + } else if ("Script".equals(discriminatorValue)) { + return ScriptActivity.fromJson(readerToUse.reset()); + } else if ("SynapseNotebook".equals(discriminatorValue)) { + return SynapseNotebookActivity.fromJson(readerToUse.reset()); + } else if ("SparkJob".equals(discriminatorValue)) { + return SynapseSparkJobDefinitionActivity.fromJson(readerToUse.reset()); + } else if ("ExecuteWranglingDataflow".equals(discriminatorValue)) { + return ExecuteWranglingDataflowActivity.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static Activity fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Activity deserializedActivity = new Activity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedActivity.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedActivity.type = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedActivity.description = reader.getString(); + } else if ("state".equals(fieldName)) { + deserializedActivity.state = ActivityState.fromString(reader.getString()); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedActivity.onInactiveMarkAs = ActivityOnInactiveMarkAs.fromString(reader.getString()); + } 
else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedActivity.dependsOn = dependsOn; + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedActivity.userProperties = userProperties; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedActivity.additionalProperties = additionalProperties; + + return deserializedActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java index 6e92acf825ed..eec010de959b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityDependency.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -18,23 +19,20 @@ * Activity dependency information. 
*/ @Fluent -public final class ActivityDependency { +public final class ActivityDependency implements JsonSerializable { /* * Activity name. */ - @JsonProperty(value = "activity", required = true) private String activity; /* * Match-Condition for the dependency. */ - @JsonProperty(value = "dependencyConditions", required = true) private List dependencyConditions; /* * Activity dependency information. */ - @JsonIgnore private Map additionalProperties; /** @@ -88,7 +86,6 @@ public ActivityDependency withDependencyConditions(List dep * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -104,14 +101,6 @@ public ActivityDependency withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -130,4 +119,58 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ActivityDependency.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("activity", this.activity); + jsonWriter.writeArrayField("dependencyConditions", this.dependencyConditions, + (writer, element) -> writer.writeString(element == null ? null : element.toString())); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ActivityDependency from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ActivityDependency if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ActivityDependency. + */ + public static ActivityDependency fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ActivityDependency deserializedActivityDependency = new ActivityDependency(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("activity".equals(fieldName)) { + deserializedActivityDependency.activity = reader.getString(); + } else if ("dependencyConditions".equals(fieldName)) { + List dependencyConditions + = reader.readArray(reader1 -> DependencyCondition.fromString(reader1.getString())); + deserializedActivityDependency.dependencyConditions = dependencyConditions; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedActivityDependency.additionalProperties = additionalProperties; + + return deserializedActivityDependency; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityOnInactiveMarkAs.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityOnInactiveMarkAs.java index 476f28082122..0ec09d39feb2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityOnInactiveMarkAs.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityOnInactiveMarkAs.java @@ -5,7 +5,6 @@ 
package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -43,7 +42,6 @@ public ActivityOnInactiveMarkAs() { * @param name a name to look for. * @return the corresponding ActivityOnInactiveMarkAs. */ - @JsonCreator public static ActivityOnInactiveMarkAs fromString(String name) { return fromString(name, ActivityOnInactiveMarkAs.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java index cafe0a8f85fd..93d3d84679a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityPolicy.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,42 +17,36 @@ * Execution policy for an activity. */ @Fluent -public final class ActivityPolicy { +public final class ActivityPolicy implements JsonSerializable { /* * Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ - @JsonProperty(value = "timeout") private Object timeout; /* * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "retry") private Object retry; /* * Interval between each retry attempt (in seconds). The default is 30 sec. */ - @JsonProperty(value = "retryIntervalInSeconds") private Integer retryIntervalInSeconds; /* * When set to true, Input from activity is considered as secure and will not be logged to monitoring. */ - @JsonProperty(value = "secureInput") private Boolean secureInput; /* * When set to true, Output from activity is considered as secure and will not be logged to monitoring. */ - @JsonProperty(value = "secureOutput") private Boolean secureOutput; /* * Execution policy for an activity. */ - @JsonIgnore private Map additionalProperties; /** @@ -173,7 +168,6 @@ public ActivityPolicy withSecureOutput(Boolean secureOutput) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -189,14 +183,6 @@ public ActivityPolicy withAdditionalProperties(Map additionalPro return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -204,4 +190,63 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("timeout", this.timeout); + jsonWriter.writeUntypedField("retry", this.retry); + jsonWriter.writeNumberField("retryIntervalInSeconds", this.retryIntervalInSeconds); + jsonWriter.writeBooleanField("secureInput", this.secureInput); + jsonWriter.writeBooleanField("secureOutput", this.secureOutput); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ActivityPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ActivityPolicy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ActivityPolicy. 
+ */ + public static ActivityPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ActivityPolicy deserializedActivityPolicy = new ActivityPolicy(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("timeout".equals(fieldName)) { + deserializedActivityPolicy.timeout = reader.readUntyped(); + } else if ("retry".equals(fieldName)) { + deserializedActivityPolicy.retry = reader.readUntyped(); + } else if ("retryIntervalInSeconds".equals(fieldName)) { + deserializedActivityPolicy.retryIntervalInSeconds = reader.getNullable(JsonReader::getInt); + } else if ("secureInput".equals(fieldName)) { + deserializedActivityPolicy.secureInput = reader.getNullable(JsonReader::getBoolean); + } else if ("secureOutput".equals(fieldName)) { + deserializedActivityPolicy.secureOutput = reader.getNullable(JsonReader::getBoolean); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedActivityPolicy.additionalProperties = additionalProperties; + + return deserializedActivityPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityRun.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityRun.java index a5bba9e73e97..6ff71546fae0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityRun.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityRun.java @@ -5,10 +5,12 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import 
com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.Map; @@ -17,89 +19,75 @@ * Information about an activity run in a pipeline. */ @Fluent -public final class ActivityRun { +public final class ActivityRun implements JsonSerializable { /* * The name of the pipeline. */ - @JsonProperty(value = "pipelineName", access = JsonProperty.Access.WRITE_ONLY) private String pipelineName; /* * The id of the pipeline run. */ - @JsonProperty(value = "pipelineRunId", access = JsonProperty.Access.WRITE_ONLY) private String pipelineRunId; /* * The name of the activity. */ - @JsonProperty(value = "activityName", access = JsonProperty.Access.WRITE_ONLY) private String activityName; /* * The type of the activity. */ - @JsonProperty(value = "activityType", access = JsonProperty.Access.WRITE_ONLY) private String activityType; /* * The id of the activity run. */ - @JsonProperty(value = "activityRunId", access = JsonProperty.Access.WRITE_ONLY) private String activityRunId; /* * The name of the compute linked service. */ - @JsonProperty(value = "linkedServiceName", access = JsonProperty.Access.WRITE_ONLY) private String linkedServiceName; /* * The status of the activity run. */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private String status; /* * The start time of the activity run in 'ISO 8601' format. */ - @JsonProperty(value = "activityRunStart", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime activityRunStart; /* * The end time of the activity run in 'ISO 8601' format. 
*/ - @JsonProperty(value = "activityRunEnd", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime activityRunEnd; /* * The duration of the activity run. */ - @JsonProperty(value = "durationInMs", access = JsonProperty.Access.WRITE_ONLY) private Integer durationInMs; /* * The input for the activity. */ - @JsonProperty(value = "input", access = JsonProperty.Access.WRITE_ONLY) private Object input; /* * The output for the activity. */ - @JsonProperty(value = "output", access = JsonProperty.Access.WRITE_ONLY) private Object output; /* * The error if any from the activity run. */ - @JsonProperty(value = "error", access = JsonProperty.Access.WRITE_ONLY) private Object error; /* * Information about an activity run in a pipeline. */ - @JsonIgnore private Map additionalProperties; /** @@ -230,7 +218,6 @@ public Object error() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -246,14 +233,6 @@ public ActivityRun withAdditionalProperties(Map additionalProper return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -261,4 +240,76 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ActivityRun from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ActivityRun if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ActivityRun. + */ + public static ActivityRun fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ActivityRun deserializedActivityRun = new ActivityRun(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("pipelineName".equals(fieldName)) { + deserializedActivityRun.pipelineName = reader.getString(); + } else if ("pipelineRunId".equals(fieldName)) { + deserializedActivityRun.pipelineRunId = reader.getString(); + } else if ("activityName".equals(fieldName)) { + deserializedActivityRun.activityName = reader.getString(); + } else if ("activityType".equals(fieldName)) { + deserializedActivityRun.activityType = reader.getString(); + } else if ("activityRunId".equals(fieldName)) { + deserializedActivityRun.activityRunId = reader.getString(); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedActivityRun.linkedServiceName = reader.getString(); + } else if ("status".equals(fieldName)) { + deserializedActivityRun.status = reader.getString(); + } else if ("activityRunStart".equals(fieldName)) { + deserializedActivityRun.activityRunStart = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("activityRunEnd".equals(fieldName)) { + deserializedActivityRun.activityRunEnd = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("durationInMs".equals(fieldName)) { + deserializedActivityRun.durationInMs = reader.getNullable(JsonReader::getInt); + } else if ("input".equals(fieldName)) { + deserializedActivityRun.input = reader.readUntyped(); + } else if ("output".equals(fieldName)) { + 
deserializedActivityRun.output = reader.readUntyped(); + } else if ("error".equals(fieldName)) { + deserializedActivityRun.error = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedActivityRun.additionalProperties = additionalProperties; + + return deserializedActivityRun; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityState.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityState.java index 13abd531e803..c46702c72126 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityState.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ActivityState.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public ActivityState() { * @param name a name to look for. * @return the corresponding ActivityState. 
*/ - @JsonCreator public static ActivityState fromString(String name) { return fromString(name, ActivityState.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java index d971145fb286..3f0a6ceeb8f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonMwsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Amazon Marketplace Web Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsLinkedService.class, visible = true) -@JsonTypeName("AmazonMWS") @Fluent public final class AmazonMwsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonMWS"; /* * Amazon Marketplace Web Service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonMwsLinkedServiceTypeProperties innerTypeProperties = new AmazonMwsLinkedServiceTypeProperties(); /** @@ -353,4 +349,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonMwsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonMwsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonMwsLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonMwsLinkedService. 
+ */ + public static AmazonMwsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonMwsLinkedService deserializedAmazonMwsLinkedService = new AmazonMwsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonMwsLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonMwsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonMwsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonMwsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonMwsLinkedService.innerTypeProperties + = AmazonMwsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonMwsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonMwsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonMwsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java index 067a02d4c96e..09b73f360d13 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Amazon Marketplace Web Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsObjectDataset.class, visible = true) -@JsonTypeName("AmazonMWSObject") @Fluent public final class AmazonMwsObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonMWSObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonMwsObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonMwsObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonMwsObjectDataset. 
+ */ + public static AmazonMwsObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonMwsObjectDataset deserializedAmazonMwsObjectDataset = new AmazonMwsObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonMwsObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonMwsObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonMwsObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonMwsObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedAmazonMwsObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java index 816eaf613a01..1b88c960675a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonMwsSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Amazon Marketplace Web Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonMwsSource.class, visible = true) -@JsonTypeName("AmazonMWSSource") @Fluent public final class AmazonMwsSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonMWSSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public AmazonMwsSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonMwsSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonMwsSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonMwsSource. 
+ */ + public static AmazonMwsSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonMwsSource deserializedAmazonMwsSource = new AmazonMwsSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAmazonMwsSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAmazonMwsSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonMwsSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonMwsSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAmazonMwsSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAmazonMwsSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonMwsSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAmazonMwsSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonMwsSource.withAdditionalProperties(additionalProperties); + + return deserializedAmazonMwsSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java index ccb58d761a7b..848a89ef3b48 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * AmazonRdsForOracle database. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForOracleLinkedService.class, - visible = true) -@JsonTypeName("AmazonRdsForOracle") @Fluent public final class AmazonRdsForOracleLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForOracle"; /* * AmazonRdsForOracle database linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonRdsForLinkedServiceTypeProperties innerTypeProperties = new AmazonRdsForLinkedServiceTypeProperties(); /** @@ -190,4 +182,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonRdsForOracleLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForOracleLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForOracleLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRdsForOracleLinkedService. 
+ */ + public static AmazonRdsForOracleLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForOracleLinkedService deserializedAmazonRdsForOracleLinkedService + = new AmazonRdsForOracleLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonRdsForOracleLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRdsForOracleLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRdsForOracleLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRdsForOracleLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRdsForOracleLinkedService.innerTypeProperties + = AmazonRdsForLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForOracleLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRdsForOracleLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForOracleLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java index 398ad7e1377e..9222ec153838 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOraclePartitionSettings.java @@ -5,38 +5,39 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for AmazonRdsForOracle source partitioning. */ @Fluent -public final class AmazonRdsForOraclePartitionSettings { +public final class AmazonRdsForOraclePartitionSettings + implements JsonSerializable { /* * Names of the physical partitions of AmazonRdsForOracle table. */ - @JsonProperty(value = "partitionNames") private Object partitionNames; /* * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /** @@ -138,4 +139,50 @@ public AmazonRdsForOraclePartitionSettings withPartitionLowerBound(Object partit */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionNames", this.partitionNames); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForOraclePartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForOraclePartitionSettings if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForOraclePartitionSettings. 
+ */ + public static AmazonRdsForOraclePartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForOraclePartitionSettings deserializedAmazonRdsForOraclePartitionSettings + = new AmazonRdsForOraclePartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionNames".equals(fieldName)) { + deserializedAmazonRdsForOraclePartitionSettings.partitionNames = reader.readUntyped(); + } else if ("partitionColumnName".equals(fieldName)) { + deserializedAmazonRdsForOraclePartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedAmazonRdsForOraclePartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedAmazonRdsForOraclePartitionSettings.partitionLowerBound = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAmazonRdsForOraclePartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java index d90a15dee3bb..87214d2233f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleSource.java @@ -5,60 +5,49 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity AmazonRdsForOracle source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForOracleSource.class, - visible = true) -@JsonTypeName("AmazonRdsForOracleSource") @Fluent public final class AmazonRdsForOracleSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForOracleSource"; /* * AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "oracleReaderQuery") private Object oracleReaderQuery; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for AmazonRdsForOracle source partitioning. */ - @JsonProperty(value = "partitionSettings") private AmazonRdsForOraclePartitionSettings partitionSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -235,4 +224,79 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("oracleReaderQuery", this.oracleReaderQuery); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForOracleSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForOracleSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForOracleSource. 
+ */ + public static AmazonRdsForOracleSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForOracleSource deserializedAmazonRdsForOracleSource = new AmazonRdsForOracleSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.type = reader.getString(); + } else if ("oracleReaderQuery".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.oracleReaderQuery = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.queryTimeout = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.partitionSettings + = AmazonRdsForOraclePartitionSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAmazonRdsForOracleSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAmazonRdsForOracleSource.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForOracleSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java index 1c26cd901a86..3f9fe8b53d08 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForOracleTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForOracleTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The AmazonRdsForOracle database dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForOracleTableDataset.class, - visible = true) -@JsonTypeName("AmazonRdsForOracleTable") @Fluent public final class AmazonRdsForOracleTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForOracleTable"; /* * AmazonRdsForOracle dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AmazonRdsForOracleTableDatasetTypeProperties innerTypeProperties; /** @@ -187,4 +179,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForOracleTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForOracleTableDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRdsForOracleTableDataset. 
+ */ + public static AmazonRdsForOracleTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForOracleTableDataset deserializedAmazonRdsForOracleTableDataset + = new AmazonRdsForOracleTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRdsForOracleTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRdsForOracleTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRdsForOracleTableDataset.innerTypeProperties + = AmazonRdsForOracleTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAmazonRdsForOracleTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForOracleTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlAuthenticationType.java index c7bc5760732c..aa7849afa277 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public AmazonRdsForSqlAuthenticationType() { * @param name a name to look for. * @return the corresponding AmazonRdsForSqlAuthenticationType. 
*/ - @JsonCreator public static AmazonRdsForSqlAuthenticationType fromString(String name) { return fromString(name, AmazonRdsForSqlAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java index 787df9f98bc8..1c1c652e2aa5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Amazon RDS for SQL Server linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForSqlServerLinkedService.class, - visible = true) -@JsonTypeName("AmazonRdsForSqlServer") @Fluent public final class AmazonRdsForSqlServerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForSqlServer"; /* * Amazon RDS for SQL Server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonRdsForSqlServerLinkedServiceTypeProperties innerTypeProperties = new AmazonRdsForSqlServerLinkedServiceTypeProperties(); @@ -763,4 +755,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonRdsForSqlServerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForSqlServerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForSqlServerLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRdsForSqlServerLinkedService. 
+ */ + public static AmazonRdsForSqlServerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForSqlServerLinkedService deserializedAmazonRdsForSqlServerLinkedService + = new AmazonRdsForSqlServerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRdsForSqlServerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRdsForSqlServerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedService.innerTypeProperties + = AmazonRdsForSqlServerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForSqlServerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRdsForSqlServerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForSqlServerLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java index eb406ff13c0d..6e30434c5c7d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerSource.java @@ -5,46 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Amazon RDS for SQL Server source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForSqlServerSource.class, - visible = true) -@JsonTypeName("AmazonRdsForSqlServerSource") @Fluent public final class AmazonRdsForSqlServerSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForSqlServerSource"; /* * SQL reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -52,26 +43,22 @@ public final class AmazonRdsForSqlServerSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * Which additional types to produce. */ - @JsonProperty(value = "produceAdditionalTypes") private Object produceAdditionalTypes; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. */ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -306,4 +293,90 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", 
this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("produceAdditionalTypes", this.produceAdditionalTypes); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForSqlServerSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForSqlServerSource if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRdsForSqlServerSource. + */ + public static AmazonRdsForSqlServerSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForSqlServerSource deserializedAmazonRdsForSqlServerSource = new AmazonRdsForSqlServerSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withDisableMetricsCollection(reader.readUntyped()); + } else if 
("queryTimeout".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.isolationLevel = reader.readUntyped(); + } else if ("produceAdditionalTypes".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.produceAdditionalTypes = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedAmazonRdsForSqlServerSource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRdsForSqlServerSource.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForSqlServerSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java index e726d162c182..1f77c4e45bbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRdsForSqlServerTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Amazon RDS for SQL Server dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRdsForSqlServerTableDataset.class, - visible = true) -@JsonTypeName("AmazonRdsForSqlServerTable") @Fluent public final class AmazonRdsForSqlServerTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRdsForSqlServerTable"; /* * The Amazon RDS for SQL Server dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AmazonRdsForSqlServerTableDatasetTypeProperties innerTypeProperties; /** @@ -187,4 +179,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRdsForSqlServerTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRdsForSqlServerTableDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRdsForSqlServerTableDataset. 
+ */ + public static AmazonRdsForSqlServerTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRdsForSqlServerTableDataset deserializedAmazonRdsForSqlServerTableDataset + = new AmazonRdsForSqlServerTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRdsForSqlServerTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRdsForSqlServerTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRdsForSqlServerTableDataset.innerTypeProperties + = AmazonRdsForSqlServerTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedAmazonRdsForSqlServerTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRdsForSqlServerTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java index 91e640adad37..4a42abf0f0ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Amazon Redshift. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRedshiftLinkedService.class, - visible = true) -@JsonTypeName("AmazonRedshift") @Fluent public final class AmazonRedshiftLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRedshift"; /* * Amazon Redshift linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonRedshiftLinkedServiceTypeProperties innerTypeProperties = new AmazonRedshiftLinkedServiceTypeProperties(); @@ -266,4 +258,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonRedshiftLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRedshiftLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRedshiftLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRedshiftLinkedService. 
+ */ + public static AmazonRedshiftLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRedshiftLinkedService deserializedAmazonRedshiftLinkedService = new AmazonRedshiftLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonRedshiftLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRedshiftLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRedshiftLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRedshiftLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRedshiftLinkedService.innerTypeProperties + = AmazonRedshiftLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonRedshiftLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRedshiftLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRedshiftLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java index 
2afbfc4655b2..23df97e0dc0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Amazon Redshift Source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonRedshiftSource.class, visible = true) -@JsonTypeName("AmazonRedshiftSource") @Fluent public final class AmazonRedshiftSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRedshiftSource"; /* * Database query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* @@ -35,7 +32,6 @@ public final class AmazonRedshiftSource extends TabularSource { * data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the * interim S3. 
*/ - @JsonProperty(value = "redshiftUnloadSettings") private RedshiftUnloadSettings redshiftUnloadSettings; /** @@ -164,4 +160,75 @@ public void validate() { redshiftUnloadSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeJsonField("redshiftUnloadSettings", this.redshiftUnloadSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRedshiftSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRedshiftSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonRedshiftSource. 
+ */ + public static AmazonRedshiftSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRedshiftSource deserializedAmazonRedshiftSource = new AmazonRedshiftSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAmazonRedshiftSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAmazonRedshiftSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonRedshiftSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonRedshiftSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAmazonRedshiftSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAmazonRedshiftSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonRedshiftSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAmazonRedshiftSource.query = reader.readUntyped(); + } else if ("redshiftUnloadSettings".equals(fieldName)) { + deserializedAmazonRedshiftSource.redshiftUnloadSettings = RedshiftUnloadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRedshiftSource.withAdditionalProperties(additionalProperties); + + return deserializedAmazonRedshiftSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java index 0a36e45eec02..21486152a68b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonRedshiftTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Amazon Redshift table dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonRedshiftTableDataset.class, - visible = true) -@JsonTypeName("AmazonRedshiftTable") @Fluent public final class AmazonRedshiftTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonRedshiftTable"; /* * Amazon Redshift table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AmazonRedshiftTableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +200,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonRedshiftTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonRedshiftTableDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonRedshiftTableDataset. 
+ */ + public static AmazonRedshiftTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonRedshiftTableDataset deserializedAmazonRedshiftTableDataset = new AmazonRedshiftTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonRedshiftTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonRedshiftTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonRedshiftTableDataset.innerTypeProperties + = AmazonRedshiftTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonRedshiftTableDataset.withAdditionalProperties(additionalProperties); 
+ + return deserializedAmazonRedshiftTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java index 8f21e8f12ab0..78b6a71f9174 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3CompatibleLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Amazon S3 Compatible. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonS3CompatibleLinkedService.class, - visible = true) -@JsonTypeName("AmazonS3Compatible") @Fluent public final class AmazonS3CompatibleLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3Compatible"; /* * Amazon S3 Compatible linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonS3CompatibleLinkedServiceTypeProperties innerTypeProperties = new AmazonS3CompatibleLinkedServiceTypeProperties(); @@ -245,4 +237,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonS3CompatibleLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3CompatibleLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3CompatibleLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonS3CompatibleLinkedService. 
+ */ + public static AmazonS3CompatibleLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3CompatibleLinkedService deserializedAmazonS3CompatibleLinkedService + = new AmazonS3CompatibleLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonS3CompatibleLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonS3CompatibleLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedService.innerTypeProperties + = AmazonS3CompatibleLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3CompatibleLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3CompatibleLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3CompatibleLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java index c037391516b2..85879937f3eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleLocation.java @@ -5,39 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of Amazon S3 Compatible dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonS3CompatibleLocation.class, - visible = true) -@JsonTypeName("AmazonS3CompatibleLocation") @Fluent public final class AmazonS3CompatibleLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3CompatibleLocation"; /* * Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "bucketName") private Object bucketName; /* * Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "version") private Object version; /** @@ -127,4 +119,63 @@ public AmazonS3CompatibleLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + jsonWriter.writeUntypedField("version", this.version); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3CompatibleLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3CompatibleLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonS3CompatibleLocation. 
+ */ + public static AmazonS3CompatibleLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3CompatibleLocation deserializedAmazonS3CompatibleLocation = new AmazonS3CompatibleLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAmazonS3CompatibleLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAmazonS3CompatibleLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3CompatibleLocation.type = reader.getString(); + } else if ("bucketName".equals(fieldName)) { + deserializedAmazonS3CompatibleLocation.bucketName = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedAmazonS3CompatibleLocation.version = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3CompatibleLocation.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3CompatibleLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java index be80cebaa0f9..9d1cbbaf6be7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3CompatibleReadSettings.java @@ -5,90 +5,74 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Amazon S3 Compatible read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AmazonS3CompatibleReadSettings.class, - visible = true) -@JsonTypeName("AmazonS3CompatibleReadSettings") @Fluent public final class AmazonS3CompatibleReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3CompatibleReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -354,4 +338,88 @@ public AmazonS3CompatibleReadSettings withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3CompatibleReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3CompatibleReadSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the AmazonS3CompatibleReadSettings. + */ + public static AmazonS3CompatibleReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3CompatibleReadSettings deserializedAmazonS3CompatibleReadSettings + = new AmazonS3CompatibleReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + 
deserializedAmazonS3CompatibleReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAmazonS3CompatibleReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3CompatibleReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3CompatibleReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java index 9c211276477c..dc2829d3cad8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Dataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3DatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * A single Amazon Simple Storage Service (S3) object or a set 
of S3 objects. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3Dataset.class, visible = true) -@JsonTypeName("AmazonS3Object") @Fluent public final class AmazonS3Dataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3Object"; /* * Amazon S3 dataset properties. */ - @JsonProperty(value = "typeProperties", required = true) private AmazonS3DatasetTypeProperties innerTypeProperties = new AmazonS3DatasetTypeProperties(); /** @@ -332,4 +328,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonS3Dataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3Dataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3Dataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonS3Dataset. + */ + public static AmazonS3Dataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3Dataset deserializedAmazonS3Dataset = new AmazonS3Dataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAmazonS3Dataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonS3Dataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAmazonS3Dataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAmazonS3Dataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonS3Dataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonS3Dataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAmazonS3Dataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonS3Dataset.innerTypeProperties = AmazonS3DatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3Dataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAmazonS3Dataset.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3Dataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java index 9b39636d26a6..db045a571431 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3LinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AmazonS3LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Amazon S3. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3LinkedService.class, visible = true) -@JsonTypeName("AmazonS3") @Fluent public final class AmazonS3LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3"; /* * Amazon S3 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AmazonS3LinkedServiceTypeProperties innerTypeProperties = new AmazonS3LinkedServiceTypeProperties(); /** @@ -263,4 +259,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AmazonS3LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3LinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AmazonS3LinkedService. 
+ */ + public static AmazonS3LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3LinkedService deserializedAmazonS3LinkedService = new AmazonS3LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAmazonS3LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAmazonS3LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAmazonS3LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAmazonS3LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAmazonS3LinkedService.innerTypeProperties + = AmazonS3LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java index 40322316a981..b49923bbb878 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3Location.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of amazon S3 dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3Location.class, visible = true) -@JsonTypeName("AmazonS3Location") @Fluent public final class AmazonS3Location extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3Location"; /* * Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "bucketName") private Object bucketName; /* * Specify the version of amazon S3. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "version") private Object version; /** @@ -121,4 +117,63 @@ public AmazonS3Location withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + jsonWriter.writeUntypedField("version", this.version); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3Location from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3Location if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonS3Location. 
+ */ + public static AmazonS3Location fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3Location deserializedAmazonS3Location = new AmazonS3Location(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAmazonS3Location.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAmazonS3Location.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3Location.type = reader.getString(); + } else if ("bucketName".equals(fieldName)) { + deserializedAmazonS3Location.bucketName = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedAmazonS3Location.version = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3Location.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3Location; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java index 7f3f9b5cf2fe..24f4f0dd93b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AmazonS3ReadSettings.java @@ -5,86 +5,74 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Amazon S3 read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AmazonS3ReadSettings.class, visible = true) -@JsonTypeName("AmazonS3ReadSettings") @Fluent public final class AmazonS3ReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AmazonS3ReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -350,4 +338,87 @@ public AmazonS3ReadSettings withDisableMetricsCollection(Object disableMetricsCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + 
jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AmazonS3ReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AmazonS3ReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AmazonS3ReadSettings. + */ + public static AmazonS3ReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AmazonS3ReadSettings deserializedAmazonS3ReadSettings = new AmazonS3ReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAmazonS3ReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAmazonS3ReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAmazonS3ReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAmazonS3ReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAmazonS3ReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedAmazonS3ReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedAmazonS3ReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) 
{ + deserializedAmazonS3ReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAmazonS3ReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedAmazonS3ReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedAmazonS3ReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAmazonS3ReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAmazonS3ReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAmazonS3ReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAmazonS3ReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java index 48c8b7eec2ac..dbf8f4753084 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppFiguresLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.AppFiguresLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for AppFigures. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AppFiguresLinkedService.class, - visible = true) -@JsonTypeName("AppFigures") @Fluent public final class AppFiguresLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AppFigures"; /* * AppFigures linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AppFiguresLinkedServiceTypeProperties innerTypeProperties = new AppFiguresLinkedServiceTypeProperties(); /** @@ -188,4 +180,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AppFiguresLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } 
+ } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AppFiguresLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AppFiguresLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AppFiguresLinkedService. + */ + public static AppFiguresLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AppFiguresLinkedService deserializedAppFiguresLinkedService = new AppFiguresLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAppFiguresLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAppFiguresLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAppFiguresLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAppFiguresLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAppFiguresLinkedService.innerTypeProperties + = AppFiguresLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAppFiguresLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedAppFiguresLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAppFiguresLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java index b43b77fc08ba..c0d10f6a558d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AppendVariableActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AppendVariableActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Append value for a Variable of type Array. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AppendVariableActivity.class, visible = true) -@JsonTypeName("AppendVariable") @Fluent public final class AppendVariableActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AppendVariable"; /* * Append Variable activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AppendVariableActivityTypeProperties innerTypeProperties = new AppendVariableActivityTypeProperties(); /** @@ -178,4 +175,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AppendVariableActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AppendVariableActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AppendVariableActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AppendVariableActivity. 
+ */ + public static AppendVariableActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AppendVariableActivity deserializedAppendVariableActivity = new AppendVariableActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAppendVariableActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAppendVariableActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAppendVariableActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAppendVariableActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAppendVariableActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAppendVariableActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedAppendVariableActivity.innerTypeProperties + = AppendVariableActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAppendVariableActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAppendVariableActivity.withAdditionalProperties(additionalProperties); + + return deserializedAppendVariableActivity; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ArmIdWrapper.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ArmIdWrapper.java index 92e3eabafd69..45e77c83c24b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ArmIdWrapper.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ArmIdWrapper.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A wrapper for an ARM resource id. */ @Immutable -public final class ArmIdWrapper { +public final class ArmIdWrapper implements JsonSerializable { /* * The id property. */ - @JsonProperty(value = "id", access = JsonProperty.Access.WRITE_ONLY) private String id; /** @@ -40,4 +43,39 @@ public String id() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ArmIdWrapper from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ArmIdWrapper if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ArmIdWrapper. 
+ */ + public static ArmIdWrapper fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ArmIdWrapper deserializedArmIdWrapper = new ArmIdWrapper(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedArmIdWrapper.id = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedArmIdWrapper; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java index 91c2a19dd0dd..80a36e944da3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AsanaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AsanaLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Asana. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AsanaLinkedService.class, visible = true) -@JsonTypeName("Asana") @Fluent public final class AsanaLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Asana"; /* * Asana linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AsanaLinkedServiceTypeProperties innerTypeProperties = new AsanaLinkedServiceTypeProperties(); /** @@ -161,4 +157,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AsanaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AsanaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AsanaLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AsanaLinkedService. 
+ */ + public static AsanaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AsanaLinkedService deserializedAsanaLinkedService = new AsanaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAsanaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAsanaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAsanaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAsanaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAsanaLinkedService.innerTypeProperties + = AsanaLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAsanaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAsanaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAsanaLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java index 6e7eb40a15a0..3b5c377401a5 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AvroDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Avro dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroDataset.class, visible = true) -@JsonTypeName("Avro") @Fluent public final class AvroDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Avro"; /* * Avro dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AvroDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AvroDataset. 
+ */ + public static AvroDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroDataset deserializedAvroDataset = new AvroDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAvroDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAvroDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAvroDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAvroDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAvroDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAvroDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAvroDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAvroDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAvroDataset.innerTypeProperties = AvroDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAvroDataset.withAdditionalProperties(additionalProperties); + + return deserializedAvroDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java index 937f0ce9f7a6..7dae87b63031 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroFormat.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The data stored in Avro format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroFormat.class, visible = true) -@JsonTypeName("AvroFormat") @Fluent public final class AvroFormat extends DatasetStorageFormat { /* * Type of dataset storage format. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AvroFormat"; /** @@ -67,4 +65,57 @@ public AvroFormat withDeserializer(Object deserializer) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("serializer", serializer()); + jsonWriter.writeUntypedField("deserializer", deserializer()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroFormat if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the AvroFormat. 
+ */ + public static AvroFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroFormat deserializedAvroFormat = new AvroFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serializer".equals(fieldName)) { + deserializedAvroFormat.withSerializer(reader.readUntyped()); + } else if ("deserializer".equals(fieldName)) { + deserializedAvroFormat.withDeserializer(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAvroFormat.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAvroFormat.withAdditionalProperties(additionalProperties); + + return deserializedAvroFormat; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java index c24f6295ddb0..2811c35188a5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; 
+import java.util.Map; /** * A copy activity Avro sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroSink.class, visible = true) -@JsonTypeName("AvroSink") @Fluent public final class AvroSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AvroSink"; /* * Avro store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /* * Avro format settings. */ - @JsonProperty(value = "formatSettings") private AvroWriteSettings formatSettings; /** @@ -161,4 +157,75 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. 
+ * @throws IOException If an error occurs while reading the AvroSink. + */ + public static AvroSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroSink deserializedAvroSink = new AvroSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAvroSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAvroSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAvroSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAvroSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAvroSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAvroSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAvroSink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedAvroSink.storeSettings = StoreWriteSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedAvroSink.formatSettings = AvroWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAvroSink.withAdditionalProperties(additionalProperties); + + return deserializedAvroSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java index 99bef5f33773..797779349591 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Avro source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroSource.class, visible = true) -@JsonTypeName("AvroSource") @Fluent public final class AvroSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AvroSource"; /* * Avro store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public void validate() { storeSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the AvroSource. 
+ */ + public static AvroSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroSource deserializedAvroSource = new AvroSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAvroSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAvroSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAvroSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAvroSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAvroSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedAvroSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAvroSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAvroSource.withAdditionalProperties(additionalProperties); + + return deserializedAvroSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java index eb0de05fac60..e5d13d6b93ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AvroWriteSettings.java @@ -5,49 +5,43 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Avro write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AvroWriteSettings.class, visible = true) -@JsonTypeName("AvroWriteSettings") @Fluent public final class AvroWriteSettings extends FormatWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AvroWriteSettings"; /* * Top level record name in write result, which is required in AVRO spec. */ - @JsonProperty(value = "recordName") private String recordName; /* * Record namespace in the write result. */ - @JsonProperty(value = "recordNamespace") private String recordNamespace; /* * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or * Expression with resultType integer). */ - @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* * Specifies the file name pattern _. when copy from non-file based store * without partitionOptions. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; /** @@ -161,4 +155,63 @@ public AvroWriteSettings withFileNamePrefix(Object fileNamePrefix) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("recordName", this.recordName); + jsonWriter.writeStringField("recordNamespace", this.recordNamespace); + jsonWriter.writeUntypedField("maxRowsPerFile", this.maxRowsPerFile); + jsonWriter.writeUntypedField("fileNamePrefix", this.fileNamePrefix); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AvroWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AvroWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AvroWriteSettings. 
+ */ + public static AvroWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AvroWriteSettings deserializedAvroWriteSettings = new AvroWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedAvroWriteSettings.type = reader.getString(); + } else if ("recordName".equals(fieldName)) { + deserializedAvroWriteSettings.recordName = reader.getString(); + } else if ("recordNamespace".equals(fieldName)) { + deserializedAvroWriteSettings.recordNamespace = reader.getString(); + } else if ("maxRowsPerFile".equals(fieldName)) { + deserializedAvroWriteSettings.maxRowsPerFile = reader.readUntyped(); + } else if ("fileNamePrefix".equals(fieldName)) { + deserializedAvroWriteSettings.fileNamePrefix = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAvroWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedAvroWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java index d267bf98f47c..c1a608f8aa74 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzPowerShellSetup.java @@ -6,30 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzPowerShellSetupTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; /** * The express custom setup of installing Azure PowerShell. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzPowerShellSetup.class, visible = true) -@JsonTypeName("AzPowerShellSetup") @Fluent public final class AzPowerShellSetup extends CustomSetupBase { /* * The type of custom setup. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzPowerShellSetup"; /* * Install Azure PowerShell type properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzPowerShellSetupTypeProperties innerTypeProperties = new AzPowerShellSetupTypeProperties(); /** @@ -98,4 +93,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzPowerShellSetup.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzPowerShellSetup from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzPowerShellSetup if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzPowerShellSetup. 
+ */ + public static AzPowerShellSetup fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzPowerShellSetup deserializedAzPowerShellSetup = new AzPowerShellSetup(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("typeProperties".equals(fieldName)) { + deserializedAzPowerShellSetup.innerTypeProperties + = AzPowerShellSetupTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzPowerShellSetup.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzPowerShellSetup; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java index a57b51fcf758..ffa0739fe063 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBatchLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureBatchLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Batch linked service. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBatchLinkedService.class, - visible = true) -@JsonTypeName("AzureBatch") @Fluent public final class AzureBatchLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBatch"; /* * Azure Batch linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureBatchLinkedServiceTypeProperties innerTypeProperties = new AzureBatchLinkedServiceTypeProperties(); /** @@ -280,4 +272,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureBatchLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBatchLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBatchLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the AzureBatchLinkedService. + */ + public static AzureBatchLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBatchLinkedService deserializedAzureBatchLinkedService = new AzureBatchLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureBatchLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureBatchLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureBatchLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureBatchLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureBatchLinkedService.innerTypeProperties + = AzureBatchLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureBatchLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBatchLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureBatchLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java index c50b49a055a3..4a81357629ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure Blob storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobDataset.class, visible = true) -@JsonTypeName("AzureBlob") @Fluent public final class AzureBlobDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlob"; /* * Azure Blob dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AzureBlobDatasetTypeProperties innerTypeProperties; /** @@ -300,4 +296,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureBlobDataset. 
+ */ + public static AzureBlobDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobDataset deserializedAzureBlobDataset = new AzureBlobDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureBlobDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureBlobDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureBlobDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureBlobDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureBlobDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureBlobDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureBlobDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureBlobDataset.innerTypeProperties = AzureBlobDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java index c76faab16b2b..0cc2e2f2bbfd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure Data Lake Storage Gen2 storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSDataset.class, visible = true) -@JsonTypeName("AzureBlobFSFile") @Fluent public final class AzureBlobFSDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSFile"; /* * Azure Data Lake Storage Gen2 dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AzureBlobFSDatasetTypeProperties innerTypeProperties; /** @@ -229,4 +225,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureBlobFSDataset. 
+ */ + public static AzureBlobFSDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSDataset deserializedAzureBlobFSDataset = new AzureBlobFSDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureBlobFSDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureBlobFSDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureBlobFSDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureBlobFSDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureBlobFSDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureBlobFSDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureBlobFSDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureBlobFSDataset.innerTypeProperties + = AzureBlobFSDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java index 7373b38c080e..88a8f71b89f4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Data Lake Storage Gen2 linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobFSLinkedService.class, - visible = true) -@JsonTypeName("AzureBlobFS") @Fluent public final class AzureBlobFSLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFS"; /* * Azure Data Lake Storage Gen2 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureBlobFSLinkedServiceTypeProperties innerTypeProperties = new AzureBlobFSLinkedServiceTypeProperties(); /** @@ -421,4 +413,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureBlobFSLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureBlobFSLinkedService. 
+ */ + public static AzureBlobFSLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSLinkedService deserializedAzureBlobFSLinkedService = new AzureBlobFSLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureBlobFSLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureBlobFSLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureBlobFSLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureBlobFSLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureBlobFSLinkedService.innerTypeProperties + = AzureBlobFSLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java index 624cf465471d..9b444f842b3f 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSLocation.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of azure blobFS dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSLocation.class, visible = true) -@JsonTypeName("AzureBlobFSLocation") @Fluent public final class AzureBlobFSLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSLocation"; /* * Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileSystem") private Object fileSystem; /** @@ -95,4 +92,60 @@ public AzureBlobFSLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("fileSystem", this.fileSystem); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSLocation. 
+ */ + public static AzureBlobFSLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSLocation deserializedAzureBlobFSLocation = new AzureBlobFSLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureBlobFSLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAzureBlobFSLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSLocation.type = reader.getString(); + } else if ("fileSystem".equals(fieldName)) { + deserializedAzureBlobFSLocation.fileSystem = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSLocation.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java index 9aa677cc8637..c4673ffa065c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSReadSettings.java @@ -5,84 +5,69 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure blobFS read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobFSReadSettings.class, - visible = true) -@JsonTypeName("AzureBlobFSReadSettings") @Fluent public final class AzureBlobFSReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -326,4 +311,84 @@ public AzureBlobFSReadSettings withDisableMetricsCollection(Object disableMetric public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry 
additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSReadSettings. + */ + public static AzureBlobFSReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSReadSettings deserializedAzureBlobFSReadSettings = new AzureBlobFSReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.enablePartitionDiscovery = 
reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAzureBlobFSReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java index d2f372edb551..91aed7fee1b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSink.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import 
java.util.List; +import java.util.Map; /** * A copy activity Azure Data Lake Storage Gen2 sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSSink.class, visible = true) -@JsonTypeName("AzureBlobFSSink") @Fluent public final class AzureBlobFSSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSSink"; /* * The type of copy behavior for copy sink. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "copyBehavior") private Object copyBehavior; /* * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array * of objects). */ - @JsonProperty(value = "metadata") private List metadata; /** @@ -164,4 +160,76 @@ public void validate() { metadata().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("copyBehavior", this.copyBehavior); + jsonWriter.writeArrayField("metadata", this.metadata, (writer, element) -> writer.writeJson(element)); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an 
instance of AzureBlobFSSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSSink. + */ + public static AzureBlobFSSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSSink deserializedAzureBlobFSSink = new AzureBlobFSSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureBlobFSSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureBlobFSSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureBlobFSSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureBlobFSSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobFSSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobFSSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSSink.type = reader.getString(); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureBlobFSSink.copyBehavior = reader.readUntyped(); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedAzureBlobFSSink.metadata = metadata; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedAzureBlobFSSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java index fdef5556c243..299d0a649acd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure BlobFS source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureBlobFSSource.class, visible = true) -@JsonTypeName("AzureBlobFSSource") @Fluent public final class AzureBlobFSSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSSource"; /* * Treat empty as null. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "treatEmptyAsNull") private Object treatEmptyAsNull; /* * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). 
*/ - @JsonProperty(value = "skipHeaderLineCount") private Object skipHeaderLineCount; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /** @@ -168,4 +163,72 @@ public AzureBlobFSSource withDisableMetricsCollection(Object disableMetricsColle public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("treatEmptyAsNull", this.treatEmptyAsNull); + jsonWriter.writeUntypedField("skipHeaderLineCount", this.skipHeaderLineCount); + jsonWriter.writeUntypedField("recursive", this.recursive); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSSource. 
+ */ + public static AzureBlobFSSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSSource deserializedAzureBlobFSSource = new AzureBlobFSSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureBlobFSSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureBlobFSSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobFSSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobFSSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSSource.type = reader.getString(); + } else if ("treatEmptyAsNull".equals(fieldName)) { + deserializedAzureBlobFSSource.treatEmptyAsNull = reader.readUntyped(); + } else if ("skipHeaderLineCount".equals(fieldName)) { + deserializedAzureBlobFSSource.skipHeaderLineCount = reader.readUntyped(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureBlobFSSource.recursive = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java index 
1160807b4ef3..db03a50b421d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobFSWriteSettings.java @@ -5,34 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure blobFS write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobFSWriteSettings.class, - visible = true) -@JsonTypeName("AzureBlobFSWriteSettings") @Fluent public final class AzureBlobFSWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobFSWriteSettings"; /* * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
*/ - @JsonProperty(value = "blockSizeInMB") private Object blockSizeInMB; /** @@ -118,4 +111,67 @@ public AzureBlobFSWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("blockSizeInMB", this.blockSizeInMB); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobFSWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobFSWriteSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobFSWriteSettings. 
+ */ + public static AzureBlobFSWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobFSWriteSettings deserializedAzureBlobFSWriteSettings = new AzureBlobFSWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobFSWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobFSWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureBlobFSWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedAzureBlobFSWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobFSWriteSettings.type = reader.getString(); + } else if ("blockSizeInMB".equals(fieldName)) { + deserializedAzureBlobFSWriteSettings.blockSizeInMB = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobFSWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobFSWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java index fda293096d57..173afd4ada0c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobStorageLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The azure blob storage linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobStorageLinkedService.class, - visible = true) -@JsonTypeName("AzureBlobStorage") @Fluent public final class AzureBlobStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobStorage"; /* * Azure Blob Storage linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureBlobStorageLinkedServiceTypeProperties innerTypeProperties = new AzureBlobStorageLinkedServiceTypeProperties(); @@ -464,4 +456,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureBlobStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureBlobStorageLinkedService. 
+ */ + public static AzureBlobStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageLinkedService deserializedAzureBlobStorageLinkedService + = new AzureBlobStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureBlobStorageLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureBlobStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureBlobStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureBlobStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureBlobStorageLinkedService.innerTypeProperties + = AzureBlobStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java index f499d459acd9..f34879047cfa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageLocation.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of azure blob dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobStorageLocation.class, - visible = true) -@JsonTypeName("AzureBlobStorageLocation") @Fluent public final class AzureBlobStorageLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobStorageLocation"; /* * Specify the container of azure blob. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "container") private Object container; /** @@ -99,4 +92,60 @@ public AzureBlobStorageLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("container", this.container); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageLocation. 
+ */ + public static AzureBlobStorageLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageLocation deserializedAzureBlobStorageLocation = new AzureBlobStorageLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureBlobStorageLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAzureBlobStorageLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobStorageLocation.type = reader.getString(); + } else if ("container".equals(fieldName)) { + deserializedAzureBlobStorageLocation.container = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobStorageLocation.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobStorageLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java index 1da0e74a4b9a..1b0bac54a705 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageReadSettings.java @@ -5,90 +5,74 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure blob read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobStorageReadSettings.class, - visible = true) -@JsonTypeName("AzureBlobStorageReadSettings") @Fluent public final class AzureBlobStorageReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobStorageReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Azure blob wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -354,4 +338,87 @@ public AzureBlobStorageReadSettings withDisableMetricsCollection(Object disableM public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + 
jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageReadSettings if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageReadSettings. + */ + public static AzureBlobStorageReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageReadSettings deserializedAzureBlobStorageReadSettings = new AzureBlobStorageReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.wildcardFolderPath = reader.readUntyped(); + } else 
if ("wildcardFileName".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAzureBlobStorageReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobStorageReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobStorageReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java index 578d94ad4627..0b8f73036dcf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureBlobStorageWriteSettings.java @@ -5,34 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure blob write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureBlobStorageWriteSettings.class, - visible = true) -@JsonTypeName("AzureBlobStorageWriteSettings") @Fluent public final class AzureBlobStorageWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureBlobStorageWriteSettings"; /* * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
*/ - @JsonProperty(value = "blockSizeInMB") private Object blockSizeInMB; /** @@ -118,4 +111,68 @@ public AzureBlobStorageWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("blockSizeInMB", this.blockSizeInMB); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageWriteSettings if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageWriteSettings. 
+ */ + public static AzureBlobStorageWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageWriteSettings deserializedAzureBlobStorageWriteSettings + = new AzureBlobStorageWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureBlobStorageWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureBlobStorageWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureBlobStorageWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedAzureBlobStorageWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedAzureBlobStorageWriteSettings.type = reader.getString(); + } else if ("blockSizeInMB".equals(fieldName)) { + deserializedAzureBlobStorageWriteSettings.blockSizeInMB = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureBlobStorageWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureBlobStorageWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java index e8b37844d163..65c1e5f4799c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerCommandActivity.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerCommandActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure Data Explorer command activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataExplorerCommandActivity.class, - visible = true) -@JsonTypeName("AzureDataExplorerCommand") @Fluent public final class AzureDataExplorerCommandActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataExplorerCommand"; /* * Azure Data Explorer command activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDataExplorerCommandActivityTypeProperties innerTypeProperties = new AzureDataExplorerCommandActivityTypeProperties(); @@ -203,4 +196,88 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerCommandActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerCommandActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerCommandActivity if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the AzureDataExplorerCommandActivity. + */ + public static AzureDataExplorerCommandActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerCommandActivity deserializedAzureDataExplorerCommandActivity + = new AzureDataExplorerCommandActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity + .withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAzureDataExplorerCommandActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAzureDataExplorerCommandActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity.innerTypeProperties + = 
AzureDataExplorerCommandActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDataExplorerCommandActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataExplorerCommandActivity.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataExplorerCommandActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java index 04cfee8c6c59..693691bf7a0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Data Explorer (Kusto) linked service. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataExplorerLinkedService.class, - visible = true) -@JsonTypeName("AzureDataExplorer") @Fluent public final class AzureDataExplorerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataExplorer"; /* * Azure Data Explorer (Kusto) linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureDataExplorerLinkedServiceTypeProperties innerTypeProperties = new AzureDataExplorerLinkedServiceTypeProperties(); @@ -264,4 +256,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataExplorerLinkedService. + */ + public static AzureDataExplorerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerLinkedService deserializedAzureDataExplorerLinkedService + = new AzureDataExplorerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureDataExplorerLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDataExplorerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDataExplorerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDataExplorerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataExplorerLinkedService.innerTypeProperties + = AzureDataExplorerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDataExplorerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataExplorerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataExplorerLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java index 0c36692a3a50..0b1cdcf3a01e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSink.java @@ -5,41 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Data Explorer sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureDataExplorerSink.class, visible = true) -@JsonTypeName("AzureDataExplorerSink") @Fluent public final class AzureDataExplorerSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataExplorerSink"; /* * A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. */ - @JsonProperty(value = "ingestionMappingName") private Object ingestionMappingName; /* * An explicit column mapping description provided in a json format. Type: string. */ - @JsonProperty(value = "ingestionMappingAsJson") private Object ingestionMappingAsJson; /* * If set to true, any aggregation will be skipped. Default is false. Type: boolean. 
*/ - @JsonProperty(value = "flushImmediately") private Object flushImmediately; /** @@ -187,4 +182,78 @@ public AzureDataExplorerSink withDisableMetricsCollection(Object disableMetricsC public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("ingestionMappingName", this.ingestionMappingName); + jsonWriter.writeUntypedField("ingestionMappingAsJson", this.ingestionMappingAsJson); + jsonWriter.writeUntypedField("flushImmediately", this.flushImmediately); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerSink if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataExplorerSink. 
+ */ + public static AzureDataExplorerSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerSink deserializedAzureDataExplorerSink = new AzureDataExplorerSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureDataExplorerSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureDataExplorerSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureDataExplorerSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureDataExplorerSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataExplorerSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataExplorerSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDataExplorerSink.type = reader.getString(); + } else if ("ingestionMappingName".equals(fieldName)) { + deserializedAzureDataExplorerSink.ingestionMappingName = reader.readUntyped(); + } else if ("ingestionMappingAsJson".equals(fieldName)) { + deserializedAzureDataExplorerSink.ingestionMappingAsJson = reader.readUntyped(); + } else if ("flushImmediately".equals(fieldName)) { + deserializedAzureDataExplorerSink.flushImmediately = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataExplorerSink.withAdditionalProperties(additionalProperties); + + return 
deserializedAzureDataExplorerSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java index 3e5832671884..47269e42e4e8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerSource.java @@ -6,55 +6,45 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Data Explorer (Kusto) source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataExplorerSource.class, - visible = true) -@JsonTypeName("AzureDataExplorerSource") @Fluent public final class AzureDataExplorerSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataExplorerSource"; /* * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "query", required = true) private Object query; /* * The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a * certain row-count limit. 
*/ - @JsonProperty(value = "noTruncation") private Object noTruncation; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -212,4 +202,76 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerSource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("noTruncation", this.noTruncation); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataExplorerSource. + */ + public static AzureDataExplorerSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerSource deserializedAzureDataExplorerSource = new AzureDataExplorerSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureDataExplorerSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureDataExplorerSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataExplorerSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataExplorerSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("query".equals(fieldName)) { + deserializedAzureDataExplorerSource.query = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedAzureDataExplorerSource.type = reader.getString(); + } else if ("noTruncation".equals(fieldName)) { + deserializedAzureDataExplorerSource.noTruncation = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzureDataExplorerSource.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzureDataExplorerSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzureDataExplorerSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataExplorerSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java index e60bdb457bd4..38e867ff243f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataExplorerTableDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure Data Explorer (Kusto) dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataExplorerTableDataset.class, - visible = true) -@JsonTypeName("AzureDataExplorerTable") @Fluent public final class AzureDataExplorerTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataExplorerTable"; /* * Azure Data Explorer (Kusto) dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDataExplorerDatasetTypeProperties innerTypeProperties = new AzureDataExplorerDatasetTypeProperties(); /** @@ -169,4 +161,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataExplorerTableDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataExplorerTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataExplorerTableDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataExplorerTableDataset. 
+ */ + public static AzureDataExplorerTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataExplorerTableDataset deserializedAzureDataExplorerTableDataset + = new AzureDataExplorerTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDataExplorerTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDataExplorerTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.innerTypeProperties + = AzureDataExplorerDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDataExplorerTableDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzureDataExplorerTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataExplorerTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java index 62494197e096..9b7aeea82616 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeAnalyticsLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeAnalyticsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Data Lake Analytics linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeAnalyticsLinkedService.class, - visible = true) -@JsonTypeName("AzureDataLakeAnalytics") @Fluent public final class AzureDataLakeAnalyticsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeAnalytics"; /* * Azure Data Lake Analytics linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDataLakeAnalyticsLinkedServiceTypeProperties innerTypeProperties = new AzureDataLakeAnalyticsLinkedServiceTypeProperties(); @@ -318,4 +310,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataLakeAnalyticsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeAnalyticsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeAnalyticsLinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataLakeAnalyticsLinkedService. 
+ */ + public static AzureDataLakeAnalyticsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeAnalyticsLinkedService deserializedAzureDataLakeAnalyticsLinkedService + = new AzureDataLakeAnalyticsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDataLakeAnalyticsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDataLakeAnalyticsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedService.innerTypeProperties + = AzureDataLakeAnalyticsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeAnalyticsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeAnalyticsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeAnalyticsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java index 4a16934b17cb..51252a28d078 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Data Lake Store dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreDataset.class, - visible = true) -@JsonTypeName("AzureDataLakeStoreFile") @Fluent public final class AzureDataLakeStoreDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreFile"; /* * Azure Data Lake Store dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AzureDataLakeStoreDatasetTypeProperties innerTypeProperties; /** @@ -233,4 +225,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreDataset. 
+ */ + public static AzureDataLakeStoreDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreDataset deserializedAzureDataLakeStoreDataset = new AzureDataLakeStoreDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDataLakeStoreDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDataLakeStoreDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataLakeStoreDataset.innerTypeProperties + = AzureDataLakeStoreDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedAzureDataLakeStoreDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java index 56d0fd60abd8..03fddacab070 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Data Lake Store linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreLinkedService.class, - visible = true) -@JsonTypeName("AzureDataLakeStore") @Fluent public final class AzureDataLakeStoreLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStore"; /* * Azure Data Lake Store linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDataLakeStoreLinkedServiceTypeProperties innerTypeProperties = new AzureDataLakeStoreLinkedServiceTypeProperties(); @@ -366,4 +358,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDataLakeStoreLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreLinkedService. 
+ */ + public static AzureDataLakeStoreLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreLinkedService deserializedAzureDataLakeStoreLinkedService + = new AzureDataLakeStoreLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDataLakeStoreLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDataLakeStoreLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedService.innerTypeProperties + = AzureDataLakeStoreLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java index b950b2deabc8..19666aec20c8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreLocation.java @@ -5,27 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of azure data lake store dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreLocation.class, - visible = true) -@JsonTypeName("AzureDataLakeStoreLocation") @Fluent public final class AzureDataLakeStoreLocation extends DatasetLocation { /* * Type of dataset storage location. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreLocation"; /** @@ -71,4 +65,57 @@ public AzureDataLakeStoreLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreLocation. 
+ */ + public static AzureDataLakeStoreLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreLocation deserializedAzureDataLakeStoreLocation = new AzureDataLakeStoreLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureDataLakeStoreLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAzureDataLakeStoreLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreLocation.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java index a82f90a41187..3948f9203502 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreReadSettings.java @@ -5,53 +5,43 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure data lake store read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreReadSettings.class, - visible = true) -@JsonTypeName("AzureDataLakeStoreReadSettings") @Fluent public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * ADLS wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * ADLS wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* @@ -59,7 +49,6 @@ public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { * folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "listAfter") private Object listAfter; /* @@ -67,38 +56,32 @@ public final class AzureDataLakeStoreReadSettings extends StoreReadSettings { * folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "listBefore") private Object listBefore; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -388,4 +371,91 @@ public AzureDataLakeStoreReadSettings withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("listAfter", this.listAfter); + jsonWriter.writeUntypedField("listBefore", this.listBefore); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreReadSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the AzureDataLakeStoreReadSettings. + */ + public static AzureDataLakeStoreReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreReadSettings deserializedAzureDataLakeStoreReadSettings + = new AzureDataLakeStoreReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.fileListPath = reader.readUntyped(); + } else if ("listAfter".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.listAfter = reader.readUntyped(); + } else if ("listBefore".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.listBefore = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + 
deserializedAzureDataLakeStoreReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAzureDataLakeStoreReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java index 515f1d8355e8..5dd39463572a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import 
java.util.Map; /** * A copy activity Azure Data Lake Store sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureDataLakeStoreSink.class, visible = true) -@JsonTypeName("AzureDataLakeStoreSink") @Fluent public final class AzureDataLakeStoreSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreSink"; /* * The type of copy behavior for copy sink. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "copyBehavior") private Object copyBehavior; /* * Single File Parallel. */ - @JsonProperty(value = "enableAdlsSingleFileParallel") private Object enableAdlsSingleFileParallel; /** @@ -157,4 +153,75 @@ public AzureDataLakeStoreSink withDisableMetricsCollection(Object disableMetrics public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("copyBehavior", this.copyBehavior); + jsonWriter.writeUntypedField("enableAdlsSingleFileParallel", this.enableAdlsSingleFileParallel); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of 
AzureDataLakeStoreSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreSink if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreSink. + */ + public static AzureDataLakeStoreSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreSink deserializedAzureDataLakeStoreSink = new AzureDataLakeStoreSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.type = reader.getString(); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.copyBehavior = reader.readUntyped(); + } else if ("enableAdlsSingleFileParallel".equals(fieldName)) { + deserializedAzureDataLakeStoreSink.enableAdlsSingleFileParallel = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new 
LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java index 6e27707275a6..439a567f0b37 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreSource.java @@ -5,34 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Data Lake source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreSource.class, - visible = true) -@JsonTypeName("AzureDataLakeStoreSource") @Fluent public final class AzureDataLakeStoreSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreSource"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). 
*/ - @JsonProperty(value = "recursive") private Object recursive; /** @@ -118,4 +111,66 @@ public AzureDataLakeStoreSource withDisableMetricsCollection(Object disableMetri public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreSource. 
+ */ + public static AzureDataLakeStoreSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreSource deserializedAzureDataLakeStoreSource = new AzureDataLakeStoreSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureDataLakeStoreSource.recursive = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java index eb5a9686e0a7..c0788778cae8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDataLakeStoreWriteSettings.java @@ -5,35 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure data lake store write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDataLakeStoreWriteSettings.class, - visible = true) -@JsonTypeName("AzureDataLakeStoreWriteSettings") @Fluent public final class AzureDataLakeStoreWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDataLakeStoreWriteSettings"; /* * Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of * "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "expiryDateTime") private Object expiryDateTime; /** @@ -121,4 +114,68 @@ public AzureDataLakeStoreWriteSettings withMetadata(List metadata) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("expiryDateTime", this.expiryDateTime); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDataLakeStoreWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDataLakeStoreWriteSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDataLakeStoreWriteSettings. 
+ */ + public static AzureDataLakeStoreWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDataLakeStoreWriteSettings deserializedAzureDataLakeStoreWriteSettings + = new AzureDataLakeStoreWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDataLakeStoreWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDataLakeStoreWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureDataLakeStoreWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedAzureDataLakeStoreWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedAzureDataLakeStoreWriteSettings.type = reader.getString(); + } else if ("expiryDateTime".equals(fieldName)) { + deserializedAzureDataLakeStoreWriteSettings.expiryDateTime = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDataLakeStoreWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureDataLakeStoreWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java index 2e7fa0535bf6..ec5b45e09a50 100644 
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDeltaLakeDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Databricks Delta Lake dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeDataset.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLakeDataset") @Fluent public final class AzureDatabricksDeltaLakeDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLakeDataset"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private AzureDatabricksDeltaLakeDatasetTypeProperties innerTypeProperties; /** @@ -183,4 +175,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeDataset. 
+ */ + public static AzureDatabricksDeltaLakeDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeDataset deserializedAzureDatabricksDeltaLakeDataset + = new AzureDatabricksDeltaLakeDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDatabricksDeltaLakeDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksDeltaLakeDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeDataset.innerTypeProperties + = AzureDatabricksDeltaLakeDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzureDatabricksDeltaLakeDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java index f76ddd350a53..2b4a0d517774 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeExportCommand.java @@ -5,41 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure Databricks Delta Lake export command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeExportCommand.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLakeExportCommand") @Fluent public final class AzureDatabricksDeltaLakeExportCommand extends ExportSettings { /* * The export setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLakeExportCommand"; /* * Specify the date format for the csv in Azure Databricks Delta Lake Copy. 
Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "dateFormat") private Object dateFormat; /* * Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "timestampFormat") private Object timestampFormat; /** @@ -111,4 +103,58 @@ public AzureDatabricksDeltaLakeExportCommand withTimestampFormat(Object timestam public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("dateFormat", this.dateFormat); + jsonWriter.writeUntypedField("timestampFormat", this.timestampFormat); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeExportCommand from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeExportCommand if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeExportCommand. 
+ */ + public static AzureDatabricksDeltaLakeExportCommand fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeExportCommand deserializedAzureDatabricksDeltaLakeExportCommand + = new AzureDatabricksDeltaLakeExportCommand(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeExportCommand.type = reader.getString(); + } else if ("dateFormat".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeExportCommand.dateFormat = reader.readUntyped(); + } else if ("timestampFormat".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeExportCommand.timestampFormat = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksDeltaLakeExportCommand.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeExportCommand; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java index 23300d092b00..8ad6957b6b32 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeImportCommand.java @@ -5,41 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure Databricks Delta Lake import command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeImportCommand.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLakeImportCommand") @Fluent public final class AzureDatabricksDeltaLakeImportCommand extends ImportSettings { /* * The import setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLakeImportCommand"; /* * Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "dateFormat") private Object dateFormat; /* * Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "timestampFormat") private Object timestampFormat; /** @@ -111,4 +103,58 @@ public AzureDatabricksDeltaLakeImportCommand withTimestampFormat(Object timestam public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("dateFormat", this.dateFormat); + jsonWriter.writeUntypedField("timestampFormat", this.timestampFormat); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeImportCommand from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeImportCommand if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeImportCommand. 
+ */ + public static AzureDatabricksDeltaLakeImportCommand fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeImportCommand deserializedAzureDatabricksDeltaLakeImportCommand + = new AzureDatabricksDeltaLakeImportCommand(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeImportCommand.type = reader.getString(); + } else if ("dateFormat".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeImportCommand.dateFormat = reader.readUntyped(); + } else if ("timestampFormat".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeImportCommand.timestampFormat = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksDeltaLakeImportCommand.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeImportCommand; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java index f8e48dc08132..a9df9d797ac4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDetltaLakeLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Databricks Delta Lake linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeLinkedService.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLake") @Fluent public final class AzureDatabricksDeltaLakeLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLake"; /* * Azure Databricks Delta Lake linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDatabricksDetltaLakeLinkedServiceTypeProperties innerTypeProperties = new AzureDatabricksDetltaLakeLinkedServiceTypeProperties(); @@ -268,4 +260,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDatabricksDeltaLakeLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeLinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeLinkedService. 
+ */ + public static AzureDatabricksDeltaLakeLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeLinkedService deserializedAzureDatabricksDeltaLakeLinkedService + = new AzureDatabricksDeltaLakeLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDatabricksDeltaLakeLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksDeltaLakeLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeLinkedService.innerTypeProperties + = AzureDatabricksDetltaLakeLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksDeltaLakeLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java index 9eb4985e5dea..9fc1e73abb03 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSink.java @@ -5,39 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Databricks Delta Lake sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeSink.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLakeSink") @Fluent public final class AzureDatabricksDeltaLakeSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLakeSink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * Azure Databricks Delta Lake import settings. 
*/ - @JsonProperty(value = "importSettings") private AzureDatabricksDeltaLakeImportCommand importSettings; /** @@ -162,4 +154,76 @@ public void validate() { importSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeJsonField("importSettings", this.importSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeSink if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeSink. 
+ */ + public static AzureDatabricksDeltaLakeSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeSink deserializedAzureDatabricksDeltaLakeSink = new AzureDatabricksDeltaLakeSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.preCopyScript = reader.readUntyped(); + } else if ("importSettings".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSink.importSettings + = AzureDatabricksDeltaLakeImportCommand.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksDeltaLakeSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeSink; + }); 
+ } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java index 2444ddddd623..ce05fb71c6d7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksDeltaLakeSource.java @@ -5,39 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Databricks Delta Lake source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksDeltaLakeSource.class, - visible = true) -@JsonTypeName("AzureDatabricksDeltaLakeSource") @Fluent public final class AzureDatabricksDeltaLakeSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricksDeltaLakeSource"; /* * Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Azure Databricks Delta Lake export settings. 
*/ - @JsonProperty(value = "exportSettings") private AzureDatabricksDeltaLakeExportCommand exportSettings; /** @@ -146,4 +138,71 @@ public void validate() { exportSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeJsonField("exportSettings", this.exportSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksDeltaLakeSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksDeltaLakeSource if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureDatabricksDeltaLakeSource. 
+ */ + public static AzureDatabricksDeltaLakeSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksDeltaLakeSource deserializedAzureDatabricksDeltaLakeSource + = new AzureDatabricksDeltaLakeSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.query = reader.readUntyped(); + } else if ("exportSettings".equals(fieldName)) { + deserializedAzureDatabricksDeltaLakeSource.exportSettings + = AzureDatabricksDeltaLakeExportCommand.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksDeltaLakeSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksDeltaLakeSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java index a546fa8d5976..6ee98f02c49b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureDatabricksLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Databricks linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureDatabricksLinkedService.class, - visible = true) -@JsonTypeName("AzureDatabricks") @Fluent public final class AzureDatabricksLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureDatabricks"; /* * Azure Databricks linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureDatabricksLinkedServiceTypeProperties innerTypeProperties = new AzureDatabricksLinkedServiceTypeProperties(); @@ -603,4 +595,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureDatabricksLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureDatabricksLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureDatabricksLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureDatabricksLinkedService. 
+ */ + public static AzureDatabricksLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureDatabricksLinkedService deserializedAzureDatabricksLinkedService = new AzureDatabricksLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureDatabricksLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureDatabricksLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureDatabricksLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureDatabricksLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureDatabricksLinkedService.innerTypeProperties + = AzureDatabricksLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureDatabricksLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureDatabricksLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureDatabricksLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java index 7b43cddb7849..1188577cd614 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureFileStorageLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure File Storage linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureFileStorageLinkedService.class, - visible = true) -@JsonTypeName("AzureFileStorage") @Fluent public final class AzureFileStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFileStorage"; /* * Azure File Storage linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureFileStorageLinkedServiceTypeProperties innerTypeProperties = new AzureFileStorageLinkedServiceTypeProperties(); @@ -340,6 +332,54 @@ public AzureFileStorageLinkedService withEncryptedCredential(String encryptedCre return this; } + /** + * Get the serviceEndpoint property: File service endpoint of the Azure File Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @return the serviceEndpoint value. + */ + public Object serviceEndpoint() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().serviceEndpoint(); + } + + /** + * Set the serviceEndpoint property: File service endpoint of the Azure File Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @param serviceEndpoint the serviceEndpoint value to set. + * @return the AzureFileStorageLinkedService object itself. + */ + public AzureFileStorageLinkedService withServiceEndpoint(Object serviceEndpoint) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureFileStorageLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withServiceEndpoint(serviceEndpoint); + return this; + } + + /** + * Get the credential property: The credential reference containing authentication information. + * + * @return the credential value. + */ + public CredentialReference credential() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().credential(); + } + + /** + * Set the credential property: The credential reference containing authentication information. + * + * @param credential the credential value to set. + * @return the AzureFileStorageLinkedService object itself. 
+ */ + public AzureFileStorageLinkedService withCredential(CredentialReference credential) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureFileStorageLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withCredential(credential); + return this; + } + /** * Validates the instance. * @@ -358,4 +398,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureFileStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFileStorageLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFileStorageLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureFileStorageLinkedService. 
+ */ + public static AzureFileStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFileStorageLinkedService deserializedAzureFileStorageLinkedService + = new AzureFileStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureFileStorageLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureFileStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureFileStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureFileStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureFileStorageLinkedService.innerTypeProperties + = AzureFileStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureFileStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFileStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureFileStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java index 7ef77ce121cc..ed0bc0346ec1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageLocation.java @@ -5,27 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of file server dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureFileStorageLocation.class, - visible = true) -@JsonTypeName("AzureFileStorageLocation") @Fluent public final class AzureFileStorageLocation extends DatasetLocation { /* * Type of dataset storage location. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFileStorageLocation"; /** @@ -71,4 +65,57 @@ public AzureFileStorageLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFileStorageLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFileStorageLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureFileStorageLocation. 
+ */ + public static AzureFileStorageLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFileStorageLocation deserializedAzureFileStorageLocation = new AzureFileStorageLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedAzureFileStorageLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedAzureFileStorageLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureFileStorageLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFileStorageLocation.withAdditionalProperties(additionalProperties); + + return deserializedAzureFileStorageLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java index bb0d46439495..88d9eb22d025 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageReadSettings.java @@ -5,91 +5,75 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Azure File Storage read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureFileStorageReadSettings.class, - visible = true) -@JsonTypeName("AzureFileStorageReadSettings") @Fluent public final class AzureFileStorageReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFileStorageReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -355,4 +339,87 @@ public AzureFileStorageReadSettings withDisableMetricsCollection(Object disableM public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + 
jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFileStorageReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFileStorageReadSettings if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureFileStorageReadSettings. + */ + public static AzureFileStorageReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFileStorageReadSettings deserializedAzureFileStorageReadSettings = new AzureFileStorageReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + 
deserializedAzureFileStorageReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedAzureFileStorageReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFileStorageReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureFileStorageReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java index f48365f10d93..90db66462213 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFileStorageWriteSettings.java @@ -5,28 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure File Storage write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureFileStorageWriteSettings.class, - visible = true) -@JsonTypeName("AzureFileStorageWriteSettings") @Fluent public final class AzureFileStorageWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFileStorageWriteSettings"; /** @@ -90,4 +84,65 @@ public AzureFileStorageWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFileStorageWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AzureFileStorageWriteSettings if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureFileStorageWriteSettings. + */ + public static AzureFileStorageWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFileStorageWriteSettings deserializedAzureFileStorageWriteSettings + = new AzureFileStorageWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureFileStorageWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureFileStorageWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedAzureFileStorageWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedAzureFileStorageWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedAzureFileStorageWriteSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFileStorageWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedAzureFileStorageWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java index c7183528b2f2..764fac37fb24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Function activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureFunctionActivity.class, visible = true) -@JsonTypeName("AzureFunctionActivity") @Fluent public final class AzureFunctionActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFunctionActivity"; /* * Azure Function activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureFunctionActivityTypeProperties innerTypeProperties = new AzureFunctionActivityTypeProperties(); /** @@ -249,4 +245,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureFunctionActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFunctionActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFunctionActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureFunctionActivity. 
+ */ + public static AzureFunctionActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFunctionActivity deserializedAzureFunctionActivity = new AzureFunctionActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAzureFunctionActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAzureFunctionActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAzureFunctionActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAzureFunctionActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAzureFunctionActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAzureFunctionActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureFunctionActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedAzureFunctionActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureFunctionActivity.innerTypeProperties + = AzureFunctionActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureFunctionActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFunctionActivity.withAdditionalProperties(additionalProperties); + + return deserializedAzureFunctionActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivityMethod.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivityMethod.java index a7096abf7900..cc19b36cde99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivityMethod.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionActivityMethod.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -62,7 +61,6 @@ public AzureFunctionActivityMethod() { * @param name a name to look for. * @return the corresponding AzureFunctionActivityMethod. 
*/ - @JsonCreator public static AzureFunctionActivityMethod fromString(String name) { return fromString(name, AzureFunctionActivityMethod.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java index a330e9c91254..874295f6a6ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureFunctionLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Function linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureFunctionLinkedService.class, - visible = true) -@JsonTypeName("AzureFunction") @Fluent public final class AzureFunctionLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureFunction"; /* * Azure Function linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureFunctionLinkedServiceTypeProperties innerTypeProperties = new AzureFunctionLinkedServiceTypeProperties(); @@ -264,4 +256,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureFunctionLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureFunctionLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureFunctionLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureFunctionLinkedService. 
+ */ + public static AzureFunctionLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureFunctionLinkedService deserializedAzureFunctionLinkedService = new AzureFunctionLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureFunctionLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureFunctionLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureFunctionLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureFunctionLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureFunctionLinkedService.innerTypeProperties + = AzureFunctionLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureFunctionLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureFunctionLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureFunctionLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java index 
b52250db5434..c8fa44d9a12c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureKeyVaultLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Key Vault linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureKeyVaultLinkedService.class, - visible = true) -@JsonTypeName("AzureKeyVault") @Fluent public final class AzureKeyVaultLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureKeyVault"; /* * Azure Key Vault linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureKeyVaultLinkedServiceTypeProperties innerTypeProperties = new AzureKeyVaultLinkedServiceTypeProperties(); @@ -166,4 +158,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureKeyVaultLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureKeyVaultLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureKeyVaultLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureKeyVaultLinkedService. 
+ */ + public static AzureKeyVaultLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureKeyVaultLinkedService deserializedAzureKeyVaultLinkedService = new AzureKeyVaultLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureKeyVaultLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureKeyVaultLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureKeyVaultLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureKeyVaultLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureKeyVaultLinkedService.innerTypeProperties + = AzureKeyVaultLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureKeyVaultLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureKeyVaultLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureKeyVaultLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java index 
9f796d703dff..e96c8c8368f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureKeyVaultSecretReference.java @@ -6,46 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Key Vault secret reference. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureKeyVaultSecretReference.class, - visible = true) -@JsonTypeName("AzureKeyVaultSecret") @Fluent public final class AzureKeyVaultSecretReference extends SecretBase { /* * Type of the secret. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureKeyVaultSecret"; /* * The Azure Key Vault linked service reference. */ - @JsonProperty(value = "store", required = true) private LinkedServiceReference store; /* * The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "secretName", required = true) private Object secretName; /* * The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string * (or Expression with resultType string). 
*/ - @JsonProperty(value = "secretVersion") private Object secretVersion; /** @@ -151,4 +140,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureKeyVaultSecretReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("store", this.store); + jsonWriter.writeUntypedField("secretName", this.secretName); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("secretVersion", this.secretVersion); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureKeyVaultSecretReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureKeyVaultSecretReference if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureKeyVaultSecretReference. 
+ */ + public static AzureKeyVaultSecretReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureKeyVaultSecretReference deserializedAzureKeyVaultSecretReference = new AzureKeyVaultSecretReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("store".equals(fieldName)) { + deserializedAzureKeyVaultSecretReference.store = LinkedServiceReference.fromJson(reader); + } else if ("secretName".equals(fieldName)) { + deserializedAzureKeyVaultSecretReference.secretName = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedAzureKeyVaultSecretReference.type = reader.getString(); + } else if ("secretVersion".equals(fieldName)) { + deserializedAzureKeyVaultSecretReference.secretVersion = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureKeyVaultSecretReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java index 392b2bd44da2..c25efa96ab97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLBatchExecutionActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLBatchExecutionActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; 
-import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure ML Batch Execution activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMLBatchExecutionActivity.class, - visible = true) -@JsonTypeName("AzureMLBatchExecution") @Fluent public final class AzureMLBatchExecutionActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMLBatchExecution"; /* * Azure ML Batch Execution activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureMLBatchExecutionActivityTypeProperties innerTypeProperties = new AzureMLBatchExecutionActivityTypeProperties(); @@ -235,4 +227,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLBatchExecutionActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLBatchExecutionActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLBatchExecutionActivity if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLBatchExecutionActivity. 
+ */ + public static AzureMLBatchExecutionActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLBatchExecutionActivity deserializedAzureMLBatchExecutionActivity + = new AzureMLBatchExecutionActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAzureMLBatchExecutionActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAzureMLBatchExecutionActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.innerTypeProperties + = AzureMLBatchExecutionActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMLBatchExecutionActivity.type = reader.getString(); + } else { 
+ if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMLBatchExecutionActivity.withAdditionalProperties(additionalProperties); + + return deserializedAzureMLBatchExecutionActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLExecutePipelineActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLExecutePipelineActivity.java index 6e5f2fb40434..9411b35e943f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLExecutePipelineActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLExecutePipelineActivity.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLExecutePipelineActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure ML Execute Pipeline activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMLExecutePipelineActivity.class, - visible = true) -@JsonTypeName("AzureMLExecutePipeline") @Fluent public final class AzureMLExecutePipelineActivity extends ExecutionActivity { /* * Type of activity. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMLExecutePipeline"; /* * Azure ML Execute Pipeline activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureMLExecutePipelineActivityTypeProperties innerTypeProperties = new AzureMLExecutePipelineActivityTypeProperties(); @@ -365,4 +358,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLExecutePipelineActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLExecutePipelineActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLExecutePipelineActivity if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLExecutePipelineActivity. + */ + public static AzureMLExecutePipelineActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLExecutePipelineActivity deserializedAzureMLExecutePipelineActivity + = new AzureMLExecutePipelineActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAzureMLExecutePipelineActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAzureMLExecutePipelineActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + 
deserializedAzureMLExecutePipelineActivity.innerTypeProperties + = AzureMLExecutePipelineActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMLExecutePipelineActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMLExecutePipelineActivity.withAdditionalProperties(additionalProperties); + + return deserializedAzureMLExecutePipelineActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java index f39516265f54..9528ba469738 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure ML Studio Web Service linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMLLinkedService.class, visible = true) -@JsonTypeName("AzureML") @Fluent public final class AzureMLLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureML"; /* * Azure ML Studio Web Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureMLLinkedServiceTypeProperties innerTypeProperties = new AzureMLLinkedServiceTypeProperties(); /** @@ -311,4 +307,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the AzureMLLinkedService. + */ + public static AzureMLLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLLinkedService deserializedAzureMLLinkedService = new AzureMLLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureMLLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMLLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMLLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMLLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMLLinkedService.innerTypeProperties + = AzureMLLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMLLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMLLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureMLLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java index 
78712e47d9e8..eb9391e86651 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLServiceLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLServiceLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure ML Service linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMLServiceLinkedService.class, - visible = true) -@JsonTypeName("AzureMLService") @Fluent public final class AzureMLServiceLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMLService"; /* * Azure ML Service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureMLServiceLinkedServiceTypeProperties innerTypeProperties = new AzureMLServiceLinkedServiceTypeProperties(); @@ -318,4 +310,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLServiceLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLServiceLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLServiceLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLServiceLinkedService. 
+ */ + public static AzureMLServiceLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLServiceLinkedService deserializedAzureMLServiceLinkedService = new AzureMLServiceLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureMLServiceLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMLServiceLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMLServiceLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMLServiceLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMLServiceLinkedService.innerTypeProperties + = AzureMLServiceLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMLServiceLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMLServiceLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureMLServiceLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java 
index aaf808f076f0..c3260c0eb925 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLUpdateResourceActivity.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMLUpdateResourceActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Azure ML Update Resource management activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMLUpdateResourceActivity.class, - visible = true) -@JsonTypeName("AzureMLUpdateResource") @Fluent public final class AzureMLUpdateResourceActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMLUpdateResource"; /* * Azure ML Update Resource management activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureMLUpdateResourceActivityTypeProperties innerTypeProperties = new AzureMLUpdateResourceActivityTypeProperties(); @@ -231,4 +224,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLUpdateResourceActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLUpdateResourceActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLUpdateResourceActivity if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLUpdateResourceActivity. 
+ */ + public static AzureMLUpdateResourceActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLUpdateResourceActivity deserializedAzureMLUpdateResourceActivity + = new AzureMLUpdateResourceActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedAzureMLUpdateResourceActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedAzureMLUpdateResourceActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.innerTypeProperties + = AzureMLUpdateResourceActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMLUpdateResourceActivity.type = reader.getString(); + } else { 
+ if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMLUpdateResourceActivity.withAdditionalProperties(additionalProperties); + + return deserializedAzureMLUpdateResourceActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java index a67ae3a02a10..a56a18e520ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMLWebServiceFile.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure ML WebService Input/Output file. */ @Fluent -public final class AzureMLWebServiceFile { +public final class AzureMLWebServiceFile implements JsonSerializable { /* * The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "filePath", required = true) private Object filePath; /* * Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. 
*/ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /** @@ -96,4 +98,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMLWebServiceFile.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("filePath", this.filePath); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMLWebServiceFile from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMLWebServiceFile if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMLWebServiceFile. 
+ */ + public static AzureMLWebServiceFile fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMLWebServiceFile deserializedAzureMLWebServiceFile = new AzureMLWebServiceFile(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("filePath".equals(fieldName)) { + deserializedAzureMLWebServiceFile.filePath = reader.readUntyped(); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMLWebServiceFile.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureMLWebServiceFile; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java index affed074719d..88c884c63f3f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMariaDBLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Database for 
MariaDB linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMariaDBLinkedService.class, - visible = true) -@JsonTypeName("AzureMariaDB") @Fluent public final class AzureMariaDBLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMariaDB"; /* * Azure Database for MariaDB linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureMariaDBLinkedServiceTypeProperties innerTypeProperties = new AzureMariaDBLinkedServiceTypeProperties(); /** @@ -190,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMariaDBLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMariaDBLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMariaDBLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMariaDBLinkedService. + */ + public static AzureMariaDBLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMariaDBLinkedService deserializedAzureMariaDBLinkedService = new AzureMariaDBLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureMariaDBLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMariaDBLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMariaDBLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMariaDBLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMariaDBLinkedService.innerTypeProperties + = AzureMariaDBLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMariaDBLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMariaDBLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureMariaDBLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java index 192d95856e91..a704e9b8a839 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure MariaDB source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMariaDBSource.class, visible = true) -@JsonTypeName("AzureMariaDBSource") @Fluent public final class AzureMariaDBSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMariaDBSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public AzureMariaDBSource withDisableMetricsCollection(Object disableMetricsColl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMariaDBSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMariaDBSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMariaDBSource. 
+ */ + public static AzureMariaDBSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMariaDBSource deserializedAzureMariaDBSource = new AzureMariaDBSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureMariaDBSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureMariaDBSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureMariaDBSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureMariaDBSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzureMariaDBSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzureMariaDBSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureMariaDBSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAzureMariaDBSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMariaDBSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureMariaDBSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java index 
85c756db716c..cfc08e2e36b1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMariaDBTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Database for MariaDB dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMariaDBTableDataset.class, - visible = true) -@JsonTypeName("AzureMariaDBTable") @Fluent public final class AzureMariaDBTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMariaDBTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMariaDBTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMariaDBTableDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMariaDBTableDataset. 
+ */ + public static AzureMariaDBTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMariaDBTableDataset deserializedAzureMariaDBTableDataset = new AzureMariaDBTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMariaDBTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMariaDBTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMariaDBTableDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMariaDBTableDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedAzureMariaDBTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java index 8601fb93e55c..917fa14ec001 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure MySQL database linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureMySqlLinkedService.class, - visible = true) -@JsonTypeName("AzureMySql") @Fluent public final class AzureMySqlLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMySql"; /* * Azure MySQL database linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureMySqlLinkedServiceTypeProperties innerTypeProperties = new AzureMySqlLinkedServiceTypeProperties(); /** @@ -190,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMySqlLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMySqlLinkedService. 
+ */ + public static AzureMySqlLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlLinkedService deserializedAzureMySqlLinkedService = new AzureMySqlLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureMySqlLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMySqlLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMySqlLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMySqlLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMySqlLinkedService.innerTypeProperties + = AzureMySqlLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMySqlLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMySqlLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureMySqlLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java index 9ea141ce4d8a..0127452a37ec 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure MySql sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlSink.class, visible = true) -@JsonTypeName("AzureMySqlSink") @Fluent public final class AzureMySqlSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMySqlSink"; /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -131,4 +128,72 @@ public AzureMySqlSink withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMySqlSink. 
+ */ + public static AzureMySqlSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlSink deserializedAzureMySqlSink = new AzureMySqlSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureMySqlSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureMySqlSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureMySqlSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureMySqlSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureMySqlSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureMySqlSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureMySqlSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedAzureMySqlSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMySqlSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureMySqlSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java index 6d9910fe891b..2790e8e0ff99 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure MySQL source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlSource.class, visible = true) -@JsonTypeName("AzureMySqlSource") @Fluent public final class AzureMySqlSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMySqlSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public AzureMySqlSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureMySqlSource. 
+ */ + public static AzureMySqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlSource deserializedAzureMySqlSource = new AzureMySqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureMySqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureMySqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureMySqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureMySqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzureMySqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzureMySqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureMySqlSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAzureMySqlSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMySqlSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureMySqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java index ac4966e75bc4..753271bdb271 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureMySqlTableDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure MySQL database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureMySqlTableDataset.class, visible = true) -@JsonTypeName("AzureMySqlTable") @Fluent public final class AzureMySqlTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureMySqlTable"; /* * Azure MySQL database dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureMySqlTableDatasetTypeProperties innerTypeProperties = new AzureMySqlTableDatasetTypeProperties(); /** @@ -190,4 +186,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureMySqlTableDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureMySqlTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureMySqlTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureMySqlTableDataset. 
+ */ + public static AzureMySqlTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureMySqlTableDataset deserializedAzureMySqlTableDataset = new AzureMySqlTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureMySqlTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureMySqlTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureMySqlTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureMySqlTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureMySqlTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureMySqlTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureMySqlTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureMySqlTableDataset.innerTypeProperties + = AzureMySqlTableDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureMySqlTableDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureMySqlTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureMySqlTableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java index b73e7493523d..0b41f5392ca1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure PostgreSQL linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzurePostgreSqlLinkedService.class, - visible = true) -@JsonTypeName("AzurePostgreSql") @Fluent public final class AzurePostgreSqlLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzurePostgreSql"; /* * Azure PostgreSQL linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzurePostgreSqlLinkedServiceTypeProperties innerTypeProperties = new AzurePostgreSqlLinkedServiceTypeProperties(); @@ -191,4 +183,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzurePostgreSqlLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzurePostgreSqlLinkedService. 
+ */ + public static AzurePostgreSqlLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlLinkedService deserializedAzurePostgreSqlLinkedService = new AzurePostgreSqlLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzurePostgreSqlLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzurePostgreSqlLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedService.innerTypeProperties + = AzurePostgreSqlLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzurePostgreSqlLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzurePostgreSqlLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzurePostgreSqlLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java index 
15d1ec8f392a..6a0c41ee8e5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure PostgreSQL sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzurePostgreSqlSink.class, visible = true) -@JsonTypeName("AzurePostgreSqlSink") @Fluent public final class AzurePostgreSqlSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzurePostgreSqlSink"; /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -131,4 +128,72 @@ public AzurePostgreSqlSink withDisableMetricsCollection(Object disableMetricsCol public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzurePostgreSqlSink. 
+ */ + public static AzurePostgreSqlSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlSink deserializedAzurePostgreSqlSink = new AzurePostgreSqlSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzurePostgreSqlSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzurePostgreSqlSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedAzurePostgreSqlSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzurePostgreSqlSink.withAdditionalProperties(additionalProperties); + + return deserializedAzurePostgreSqlSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java 
index 1a29d7966809..903e12771fa1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure PostgreSQL source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzurePostgreSqlSource.class, visible = true) -@JsonTypeName("AzurePostgreSqlSource") @Fluent public final class AzurePostgreSqlSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzurePostgreSqlSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public AzurePostgreSqlSource withDisableMetricsCollection(Object disableMetricsC public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzurePostgreSqlSource. 
+ */ + public static AzurePostgreSqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlSource deserializedAzurePostgreSqlSource = new AzurePostgreSqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzurePostgreSqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzurePostgreSqlSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedAzurePostgreSqlSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzurePostgreSqlSource.withAdditionalProperties(additionalProperties); + + return deserializedAzurePostgreSqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java index 0cf004aa8cb1..aee04ae66b8f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzurePostgreSqlTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure PostgreSQL dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzurePostgreSqlTableDataset.class, - visible = true) -@JsonTypeName("AzurePostgreSqlTable") @Fluent public final class AzurePostgreSqlTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzurePostgreSqlTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private AzurePostgreSqlTableDatasetTypeProperties innerTypeProperties; /** @@ -212,4 +204,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzurePostgreSqlTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzurePostgreSqlTableDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzurePostgreSqlTableDataset. 
+ */ + public static AzurePostgreSqlTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzurePostgreSqlTableDataset deserializedAzurePostgreSqlTableDataset = new AzurePostgreSqlTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzurePostgreSqlTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzurePostgreSqlTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzurePostgreSqlTableDataset.innerTypeProperties + = AzurePostgreSqlTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzurePostgreSqlTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzurePostgreSqlTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java index 0555df47b823..de929ce0a408 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureQueueSink.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Queue sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureQueueSink.class, visible = true) -@JsonTypeName("AzureQueueSink") @Fluent public final class AzureQueueSink extends CopySink { /* * Copy sink type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureQueueSink"; /** @@ -103,4 +101,69 @@ public AzureQueueSink withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureQueueSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureQueueSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureQueueSink. 
+ */ + public static AzureQueueSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureQueueSink deserializedAzureQueueSink = new AzureQueueSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureQueueSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureQueueSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureQueueSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureQueueSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureQueueSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureQueueSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureQueueSink.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureQueueSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureQueueSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java index e60b95042bfe..6133b503e8f9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java 
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSearchIndexDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure Search Index. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSearchIndexDataset.class, - visible = true) -@JsonTypeName("AzureSearchIndex") @Fluent public final class AzureSearchIndexDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSearchIndex"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureSearchIndexDatasetTypeProperties innerTypeProperties = new AzureSearchIndexDatasetTypeProperties(); /** @@ -169,4 +161,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSearchIndexDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSearchIndexDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSearchIndexDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSearchIndexDataset. 
+ */ + public static AzureSearchIndexDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSearchIndexDataset deserializedAzureSearchIndexDataset = new AzureSearchIndexDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureSearchIndexDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSearchIndexDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureSearchIndexDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureSearchIndexDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSearchIndexDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSearchIndexDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureSearchIndexDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSearchIndexDataset.innerTypeProperties + = AzureSearchIndexDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSearchIndexDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSearchIndexDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureSearchIndexDataset; 
+ }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java index 28db9d9c009d..a2992c67408b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Search Index sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSearchIndexSink.class, visible = true) -@JsonTypeName("AzureSearchIndexSink") @Fluent public final class AzureSearchIndexSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSearchIndexSink"; /* * Specify the write behavior when upserting documents into Azure Search Index. 
*/ - @JsonProperty(value = "writeBehavior") private AzureSearchIndexWriteBehaviorType writeBehavior; /** @@ -129,4 +126,73 @@ public AzureSearchIndexSink withDisableMetricsCollection(Object disableMetricsCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSearchIndexSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSearchIndexSink if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSearchIndexSink. 
+ */ + public static AzureSearchIndexSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSearchIndexSink deserializedAzureSearchIndexSink = new AzureSearchIndexSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureSearchIndexSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureSearchIndexSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureSearchIndexSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureSearchIndexSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureSearchIndexSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureSearchIndexSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureSearchIndexSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedAzureSearchIndexSink.writeBehavior + = AzureSearchIndexWriteBehaviorType.fromString(reader.getString()); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSearchIndexSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureSearchIndexSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexWriteBehaviorType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexWriteBehaviorType.java index 69d4d30709ec..9714bd99f4f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexWriteBehaviorType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchIndexWriteBehaviorType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public AzureSearchIndexWriteBehaviorType() { * @param name a name to look for. * @return the corresponding AzureSearchIndexWriteBehaviorType. */ - @JsonCreator public static AzureSearchIndexWriteBehaviorType fromString(String name) { return fromString(name, AzureSearchIndexWriteBehaviorType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java index 312aed59c4d9..9ec40e2af98f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSearchLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSearchLinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Windows Azure Search Service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSearchLinkedService.class, - visible = true) -@JsonTypeName("AzureSearch") @Fluent public final class AzureSearchLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSearch"; /* * Windows Azure Search Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureSearchLinkedServiceTypeProperties innerTypeProperties = new AzureSearchLinkedServiceTypeProperties(); /** @@ -188,4 +180,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSearchLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + 
* Reads an instance of AzureSearchLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSearchLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSearchLinkedService. + */ + public static AzureSearchLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSearchLinkedService deserializedAzureSearchLinkedService = new AzureSearchLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureSearchLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSearchLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSearchLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSearchLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSearchLinkedService.innerTypeProperties + = AzureSearchLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSearchLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzureSearchLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureSearchLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWAuthenticationType.java index 3a651b13ce25..db8942d65295 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -49,7 +48,6 @@ public AzureSqlDWAuthenticationType() { * @param name a name to look for. * @return the corresponding AzureSqlDWAuthenticationType. 
*/ - @JsonCreator public static AzureSqlDWAuthenticationType fromString(String name) { return fromString(name, AzureSqlDWAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java index 589ce711dc7e..52c3e0248613 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure SQL Data Warehouse linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSqlDWLinkedService.class, - visible = true) -@JsonTypeName("AzureSqlDW") @Fluent public final class AzureSqlDWLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlDW"; /* * Azure SQL Data Warehouse linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureSqlDWLinkedServiceTypeProperties innerTypeProperties = new AzureSqlDWLinkedServiceTypeProperties(); /** @@ -918,4 +910,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSqlDWLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDWLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDWLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlDWLinkedService. 
+ */ + public static AzureSqlDWLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDWLinkedService deserializedAzureSqlDWLinkedService = new AzureSqlDWLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureSqlDWLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlDWLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlDWLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlDWLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlDWLinkedService.innerTypeProperties + = AzureSqlDWLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlDWLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlDWLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlDWLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java index 6e984315953c..e8a4d18f1443 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDWTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure SQL Data Warehouse dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlDWTableDataset.class, visible = true) -@JsonTypeName("AzureSqlDWTable") @Fluent public final class AzureSqlDWTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlDWTable"; /* * Azure SQL Data Warehouse dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AzureSqlDWTableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDWTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDWTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlDWTableDataset. 
+ */ + public static AzureSqlDWTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDWTableDataset deserializedAzureSqlDWTableDataset = new AzureSqlDWTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlDWTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlDWTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlDWTableDataset.innerTypeProperties + = AzureSqlDWTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlDWTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlDWTableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseAuthenticationType.java index a968f69283ca..63c82d70a4fc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -49,7 +48,6 @@ public AzureSqlDatabaseAuthenticationType() { * @param name a name to look for. * @return the corresponding AzureSqlDatabaseAuthenticationType. 
*/ - @JsonCreator public static AzureSqlDatabaseAuthenticationType fromString(String name) { return fromString(name, AzureSqlDatabaseAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java index 9ff3941fe627..6abfa831773c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlDatabaseLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDatabaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Azure SQL Database linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSqlDatabaseLinkedService.class, - visible = true) -@JsonTypeName("AzureSqlDatabase") @Fluent public final class AzureSqlDatabaseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlDatabase"; /* * Azure SQL Database linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureSqlDatabaseLinkedServiceTypeProperties innerTypeProperties = new AzureSqlDatabaseLinkedServiceTypeProperties(); @@ -943,4 +935,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSqlDatabaseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlDatabaseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlDatabaseLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlDatabaseLinkedService. 
+ */ + public static AzureSqlDatabaseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlDatabaseLinkedService deserializedAzureSqlDatabaseLinkedService + = new AzureSqlDatabaseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlDatabaseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlDatabaseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedService.innerTypeProperties + = AzureSqlDatabaseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlDatabaseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlDatabaseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlDatabaseLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMIAuthenticationType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMIAuthenticationType.java index 25340e95c679..8816cd2e9894 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMIAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMIAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -49,7 +48,6 @@ public AzureSqlMIAuthenticationType() { * @param name a name to look for. * @return the corresponding AzureSqlMIAuthenticationType. */ - @JsonCreator public static AzureSqlMIAuthenticationType fromString(String name) { return fromString(name, AzureSqlMIAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java index c4aa1aac7b2e..6ce306ac5eb6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMILinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMILinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure SQL Managed Instance linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSqlMILinkedService.class, - visible = true) -@JsonTypeName("AzureSqlMI") @Fluent public final class AzureSqlMILinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlMI"; /* * Azure SQL Managed Instance linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureSqlMILinkedServiceTypeProperties innerTypeProperties = new AzureSqlMILinkedServiceTypeProperties(); /** @@ -941,4 +933,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSqlMILinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlMILinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlMILinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlMILinkedService. + */ + public static AzureSqlMILinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlMILinkedService deserializedAzureSqlMILinkedService = new AzureSqlMILinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureSqlMILinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlMILinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlMILinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlMILinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlMILinkedService.innerTypeProperties + = AzureSqlMILinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlMILinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlMILinkedService.withAdditionalProperties(additionalProperties); + + return 
deserializedAzureSqlMILinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java index 2a10b4be14db..bfd1336740b2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlMITableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMITableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure SQL Managed Instance dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlMITableDataset.class, visible = true) -@JsonTypeName("AzureSqlMITable") @Fluent public final class AzureSqlMITableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlMITable"; /* * Azure SQL Managed Instance dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private AzureSqlMITableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlMITableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlMITableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlMITableDataset. 
+ */ + public static AzureSqlMITableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlMITableDataset deserializedAzureSqlMITableDataset = new AzureSqlMITableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureSqlMITableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlMITableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureSqlMITableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlMITableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlMITableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlMITableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureSqlMITableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlMITableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlMITableDataset.innerTypeProperties + = AzureSqlMITableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlMITableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlMITableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java index 507524b38fbe..e7eee9242f29 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSink.java @@ -5,79 +5,68 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure SQL sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlSink.class, visible = true) -@JsonTypeName("AzureSqlSink") @Fluent public final class AzureSqlSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlSink"; /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterStoredProcedureName") private Object sqlWriterStoredProcedureName; /* * SQL writer table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterTableType") private Object sqlWriterTableType; /* * SQL pre-copy script. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * SQL stored procedure parameters. */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "storedProcedureTableTypeParameterName") private Object storedProcedureTableTypeParameterName; /* * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "sqlWriterUseTableLock") private Object sqlWriterUseTableLock; /* * Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType * SqlWriteBehaviorEnum) */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /* * SQL upsert settings. 
*/ - @JsonProperty(value = "upsertSettings") private SqlUpsertSettings upsertSettings; /** @@ -352,4 +341,97 @@ public void validate() { upsertSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlWriterStoredProcedureName", this.sqlWriterStoredProcedureName); + jsonWriter.writeUntypedField("sqlWriterTableType", this.sqlWriterTableType); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("storedProcedureTableTypeParameterName", + this.storedProcedureTableTypeParameterName); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("sqlWriterUseTableLock", this.sqlWriterUseTableLock); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + jsonWriter.writeJsonField("upsertSettings", this.upsertSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of AzureSqlSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlSink. + */ + public static AzureSqlSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlSink deserializedAzureSqlSink = new AzureSqlSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureSqlSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureSqlSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureSqlSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureSqlSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureSqlSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureSqlSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlSink.type = reader.getString(); + } else if ("sqlWriterStoredProcedureName".equals(fieldName)) { + deserializedAzureSqlSink.sqlWriterStoredProcedureName = reader.readUntyped(); + } else if ("sqlWriterTableType".equals(fieldName)) { + deserializedAzureSqlSink.sqlWriterTableType = reader.readUntyped(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedAzureSqlSink.preCopyScript = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedAzureSqlSink.storedProcedureParameters = reader.readUntyped(); + } else if 
("storedProcedureTableTypeParameterName".equals(fieldName)) { + deserializedAzureSqlSink.storedProcedureTableTypeParameterName = reader.readUntyped(); + } else if ("tableOption".equals(fieldName)) { + deserializedAzureSqlSink.tableOption = reader.readUntyped(); + } else if ("sqlWriterUseTableLock".equals(fieldName)) { + deserializedAzureSqlSink.sqlWriterUseTableLock = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedAzureSqlSink.writeBehavior = reader.readUntyped(); + } else if ("upsertSettings".equals(fieldName)) { + deserializedAzureSqlSink.upsertSettings = SqlUpsertSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java index 2947628a55d4..d3085da25ed5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import 
java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure SQL source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlSource.class, visible = true) -@JsonTypeName("AzureSqlSource") @Fluent public final class AzureSqlSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlSource"; /* * SQL reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -48,26 +43,22 @@ public final class AzureSqlSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * Which additional types to produce. */ - @JsonProperty(value = "produceAdditionalTypes") private Object produceAdditionalTypes; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. 
*/ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -304,4 +295,90 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("produceAdditionalTypes", this.produceAdditionalTypes); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSqlSource. 
+ */ + public static AzureSqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlSource deserializedAzureSqlSource = new AzureSqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureSqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureSqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureSqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureSqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzureSqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzureSqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedAzureSqlSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedAzureSqlSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedAzureSqlSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedAzureSqlSource.isolationLevel = reader.readUntyped(); + } else if ("produceAdditionalTypes".equals(fieldName)) { + deserializedAzureSqlSource.produceAdditionalTypes = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedAzureSqlSource.partitionOption = 
reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedAzureSqlSource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java index e391fef98d1c..e0f6130c11d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSqlTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure SQL Server database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureSqlTableDataset.class, visible = true) -@JsonTypeName("AzureSqlTable") @Fluent public final class AzureSqlTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSqlTable"; /* * Azure SQL dataset properties. */ - @JsonProperty(value = "typeProperties") private AzureSqlTableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSqlTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSqlTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSqlTableDataset. 
+ */ + public static AzureSqlTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSqlTableDataset deserializedAzureSqlTableDataset = new AzureSqlTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureSqlTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSqlTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureSqlTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureSqlTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSqlTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSqlTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureSqlTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedAzureSqlTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSqlTableDataset.innerTypeProperties + = AzureSqlTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSqlTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureSqlTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageAuthenticationType.java index 4316b8b50151..5ac647724b78 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public AzureStorageAuthenticationType() { * @param name a name to look for. * @return the corresponding AzureStorageAuthenticationType. */ - @JsonCreator public static AzureStorageAuthenticationType fromString(String name) { return fromString(name, AzureStorageAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java index cebec3151399..55b8b55f42e8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureStorageLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.AzureStorageLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The storage account linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureStorageLinkedService.class, - visible = true) -@JsonTypeName("AzureStorage") @Fluent public final class AzureStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureStorage"; /* * Azure Storage linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureStorageLinkedServiceTypeProperties innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); /** @@ -238,4 +230,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), 
additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureStorageLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureStorageLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureStorageLinkedService. + */ + public static AzureStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureStorageLinkedService deserializedAzureStorageLinkedService = new AzureStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureStorageLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureStorageLinkedService.innerTypeProperties + = AzureStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); 
+ } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java index 762faaa8604a..2e8875c23cc7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureSynapseArtifactsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Azure Synapse Analytics (Artifacts) linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureSynapseArtifactsLinkedService.class, - visible = true) -@JsonTypeName("AzureSynapseArtifacts") @Fluent public final class AzureSynapseArtifactsLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureSynapseArtifacts"; /* * Azure Synapse Analytics (Artifacts) linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private AzureSynapseArtifactsLinkedServiceTypeProperties innerTypeProperties = new AzureSynapseArtifactsLinkedServiceTypeProperties(); @@ -195,4 +187,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureSynapseArtifactsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSynapseArtifactsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSynapseArtifactsLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureSynapseArtifactsLinkedService. 
+ */ + public static AzureSynapseArtifactsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSynapseArtifactsLinkedService deserializedAzureSynapseArtifactsLinkedService + = new AzureSynapseArtifactsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureSynapseArtifactsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureSynapseArtifactsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedService.innerTypeProperties + = AzureSynapseArtifactsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureSynapseArtifactsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureSynapseArtifactsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureSynapseArtifactsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java index 9a4775f77e43..f67725680ac0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.AzureTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Azure Table storage dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableDataset.class, visible = true) -@JsonTypeName("AzureTable") @Fluent public final class AzureTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureTable"; /* * Azure Table dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private AzureTableDatasetTypeProperties innerTypeProperties = new AzureTableDatasetTypeProperties(); /** @@ -165,4 +161,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureTableDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureTableDataset. 
+ */ + public static AzureTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableDataset deserializedAzureTableDataset = new AzureTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedAzureTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedAzureTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedAzureTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedAzureTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureTableDataset.innerTypeProperties + = AzureTableDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureTableDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedAzureTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java index 26b64ea16a62..85b16b5cdb0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSink.java @@ -5,47 +5,41 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Table sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableSink.class, visible = true) -@JsonTypeName("AzureTableSink") @Fluent public final class AzureTableSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureTableSink"; /* * Azure Table default partition key value. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "azureTableDefaultPartitionKeyValue") private Object azureTableDefaultPartitionKeyValue; /* * Azure Table partition key name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "azureTablePartitionKeyName") private Object azureTablePartitionKeyName; /* * Azure Table row key name. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "azureTableRowKeyName") private Object azureTableRowKeyName; /* * Azure Table insert type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "azureTableInsertType") private Object azureTableInsertType; /** @@ -215,4 +209,81 @@ public AzureTableSink withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("azureTableDefaultPartitionKeyValue", this.azureTableDefaultPartitionKeyValue); + jsonWriter.writeUntypedField("azureTablePartitionKeyName", this.azureTablePartitionKeyName); + jsonWriter.writeUntypedField("azureTableRowKeyName", this.azureTableRowKeyName); + jsonWriter.writeUntypedField("azureTableInsertType", this.azureTableInsertType); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the AzureTableSink. + */ + public static AzureTableSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableSink deserializedAzureTableSink = new AzureTableSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedAzureTableSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedAzureTableSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedAzureTableSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedAzureTableSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureTableSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureTableSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureTableSink.type = reader.getString(); + } else if ("azureTableDefaultPartitionKeyValue".equals(fieldName)) { + deserializedAzureTableSink.azureTableDefaultPartitionKeyValue = reader.readUntyped(); + } else if ("azureTablePartitionKeyName".equals(fieldName)) { + deserializedAzureTableSink.azureTablePartitionKeyName = reader.readUntyped(); + } else if ("azureTableRowKeyName".equals(fieldName)) { + deserializedAzureTableSink.azureTableRowKeyName = reader.readUntyped(); + } else if ("azureTableInsertType".equals(fieldName)) { + deserializedAzureTableSink.azureTableInsertType = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureTableSink.withAdditionalProperties(additionalProperties); + + return deserializedAzureTableSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java index b09b39169365..caab34658790 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableSource.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Table source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = AzureTableSource.class, visible = true) -@JsonTypeName("AzureTableSource") @Fluent public final class AzureTableSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureTableSource"; /* * Azure Table source query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "azureTableSourceQuery") private Object azureTableSourceQuery; /* * Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "azureTableSourceIgnoreTableNotFound") private Object azureTableSourceIgnoreTableNotFound; /** @@ -159,4 +155,75 @@ public AzureTableSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("azureTableSourceQuery", this.azureTableSourceQuery); + jsonWriter.writeUntypedField("azureTableSourceIgnoreTableNotFound", this.azureTableSourceIgnoreTableNotFound); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureTableSource. 
+ */ + public static AzureTableSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableSource deserializedAzureTableSource = new AzureTableSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedAzureTableSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedAzureTableSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedAzureTableSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedAzureTableSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedAzureTableSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedAzureTableSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedAzureTableSource.type = reader.getString(); + } else if ("azureTableSourceQuery".equals(fieldName)) { + deserializedAzureTableSource.azureTableSourceQuery = reader.readUntyped(); + } else if ("azureTableSourceIgnoreTableNotFound".equals(fieldName)) { + deserializedAzureTableSource.azureTableSourceIgnoreTableNotFound = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedAzureTableSource.withAdditionalProperties(additionalProperties); + + return deserializedAzureTableSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java index e38e2a8995e3..2387e03f46d9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureTableStorageLinkedService.java @@ -6,37 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.azure.resourcemanager.datafactory.fluent.models.AzureStorageLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.datafactory.fluent.models.AzureTableStorageLinkedServiceTypeProperties; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The azure table storage linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = AzureTableStorageLinkedService.class, - visible = true) -@JsonTypeName("AzureTableStorage") @Fluent public final class AzureTableStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "AzureTableStorage"; /* * Azure Table Storage linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) - private AzureStorageLinkedServiceTypeProperties innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + private AzureTableStorageLinkedServiceTypeProperties innerTypeProperties + = new AzureTableStorageLinkedServiceTypeProperties(); /** * Creates an instance of AzureTableStorageLinkedService class. @@ -59,7 +52,7 @@ public String type() { * * @return the innerTypeProperties value. */ - private AzureStorageLinkedServiceTypeProperties innerTypeProperties() { + private AzureTableStorageLinkedServiceTypeProperties innerTypeProperties() { return this.innerTypeProperties; } @@ -99,6 +92,54 @@ public AzureTableStorageLinkedService withAnnotations(List annotations) return this; } + /** + * Get the serviceEndpoint property: Table service endpoint of the Azure Table Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @return the serviceEndpoint value. + */ + public Object serviceEndpoint() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().serviceEndpoint(); + } + + /** + * Set the serviceEndpoint property: Table service endpoint of the Azure Table Storage resource. It is mutually + * exclusive with connectionString, sasUri property. + * + * @param serviceEndpoint the serviceEndpoint value to set. + * @return the AzureTableStorageLinkedService object itself. + */ + public AzureTableStorageLinkedService withServiceEndpoint(Object serviceEndpoint) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withServiceEndpoint(serviceEndpoint); + return this; + } + + /** + * Get the credential property: The credential reference containing authentication information. + * + * @return the credential value. + */ + public CredentialReference credential() { + return this.innerTypeProperties() == null ? 
null : this.innerTypeProperties().credential(); + } + + /** + * Set the credential property: The credential reference containing authentication information. + * + * @param credential the credential value to set. + * @return the AzureTableStorageLinkedService object itself. + */ + public AzureTableStorageLinkedService withCredential(CredentialReference credential) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withCredential(credential); + return this; + } + /** * Get the connectionString property: The connection string. It is mutually exclusive with sasUri property. Type: * string, SecureString or AzureKeyVaultSecretReference. @@ -118,7 +159,7 @@ public Object connectionString() { */ public AzureTableStorageLinkedService withConnectionString(Object connectionString) { if (this.innerTypeProperties() == null) { - this.innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); } this.innerTypeProperties().withConnectionString(connectionString); return this; @@ -141,7 +182,7 @@ public AzureKeyVaultSecretReference accountKey() { */ public AzureTableStorageLinkedService withAccountKey(AzureKeyVaultSecretReference accountKey) { if (this.innerTypeProperties() == null) { - this.innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); } this.innerTypeProperties().withAccountKey(accountKey); return this; @@ -166,7 +207,7 @@ public Object sasUri() { */ public AzureTableStorageLinkedService withSasUri(Object sasUri) { if (this.innerTypeProperties() == null) { - this.innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); } this.innerTypeProperties().withSasUri(sasUri); return this; @@ -189,7 
+230,7 @@ public AzureKeyVaultSecretReference sasToken() { */ public AzureTableStorageLinkedService withSasToken(AzureKeyVaultSecretReference sasToken) { if (this.innerTypeProperties() == null) { - this.innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); } this.innerTypeProperties().withSasToken(sasToken); return this; @@ -214,7 +255,7 @@ public String encryptedCredential() { */ public AzureTableStorageLinkedService withEncryptedCredential(String encryptedCredential) { if (this.innerTypeProperties() == null) { - this.innerTypeProperties = new AzureStorageLinkedServiceTypeProperties(); + this.innerTypeProperties = new AzureTableStorageLinkedServiceTypeProperties(); } this.innerTypeProperties().withEncryptedCredential(encryptedCredential); return this; @@ -238,4 +279,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(AzureTableStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureTableStorageLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureTableStorageLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the AzureTableStorageLinkedService. + */ + public static AzureTableStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureTableStorageLinkedService deserializedAzureTableStorageLinkedService + = new AzureTableStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedAzureTableStorageLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedAzureTableStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedAzureTableStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedAzureTableStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedAzureTableStorageLinkedService.innerTypeProperties + = AzureTableStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedAzureTableStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedAzureTableStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedAzureTableStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java index f280dca1e8db..5f8f66a59228 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Big data pool reference type. */ @Fluent -public final class BigDataPoolParametrizationReference { +public final class BigDataPoolParametrizationReference + implements JsonSerializable<BigDataPoolParametrizationReference> { /* * Big data pool reference type. */ - @JsonProperty(value = "type", required = true) private BigDataPoolReferenceType type; /* * Reference big data pool name. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "referenceName", required = true) private Object referenceName; /** @@ -92,4 +95,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BigDataPoolParametrizationReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BigDataPoolParametrizationReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BigDataPoolParametrizationReference if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BigDataPoolParametrizationReference. 
+ */ + public static BigDataPoolParametrizationReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BigDataPoolParametrizationReference deserializedBigDataPoolParametrizationReference + = new BigDataPoolParametrizationReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedBigDataPoolParametrizationReference.type + = BigDataPoolReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedBigDataPoolParametrizationReference.referenceName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedBigDataPoolParametrizationReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java index cdbd4391cf3f..ec1753d9e3bd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public BigDataPoolReferenceType() { * @param name a name to look for. * @return the corresponding BigDataPoolReferenceType. 
*/ - @JsonCreator public static BigDataPoolReferenceType fromString(String name) { return fromString(name, BigDataPoolReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java index 2b01d5e2c588..f90626cc26ae 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.BinaryDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Binary dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinaryDataset.class, visible = true) -@JsonTypeName("Binary") @Fluent public final class BinaryDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Binary"; /* * Binary dataset properties. 
 */ - @JsonProperty(value = "typeProperties") private BinaryDatasetTypeProperties innerTypeProperties; /** @@ -179,4 +175,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BinaryDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BinaryDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BinaryDataset. 
+ */ + public static BinaryDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BinaryDataset deserializedBinaryDataset = new BinaryDataset(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedBinaryDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedBinaryDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedBinaryDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedBinaryDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedBinaryDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedBinaryDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedBinaryDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedBinaryDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedBinaryDataset.innerTypeProperties = BinaryDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBinaryDataset.withAdditionalProperties(additionalProperties); + + return deserializedBinaryDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java index c95d5798e90d..1fd740db6703 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinaryReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Binary read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinaryReadSettings.class, visible = true) -@JsonTypeName("BinaryReadSettings") @Fluent public final class BinaryReadSettings extends FormatReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BinaryReadSettings"; /* * Compression settings. 
 */ - @JsonProperty(value = "compressionProperties") private CompressionReadSettings compressionProperties; /** @@ -78,4 +75,54 @@ public void validate() { compressionProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("compressionProperties", this.compressionProperties); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BinaryReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BinaryReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the BinaryReadSettings. 
+ */ + public static BinaryReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BinaryReadSettings deserializedBinaryReadSettings = new BinaryReadSettings(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedBinaryReadSettings.type = reader.getString(); + } else if ("compressionProperties".equals(fieldName)) { + deserializedBinaryReadSettings.compressionProperties = CompressionReadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBinaryReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedBinaryReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java index f3a78d49b888..accc6687a0d8 100--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; 
+import java.util.Map; /** * A copy activity Binary sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinarySink.class, visible = true) -@JsonTypeName("BinarySink") @Fluent public final class BinarySink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BinarySink"; /* * Binary store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /** @@ -132,4 +129,72 @@ public void validate() { storeSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BinarySink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BinarySink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the BinarySink. 
+ */ + public static BinarySink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BinarySink deserializedBinarySink = new BinarySink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedBinarySink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedBinarySink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedBinarySink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedBinarySink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedBinarySink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedBinarySink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedBinarySink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedBinarySink.storeSettings = StoreWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBinarySink.withAdditionalProperties(additionalProperties); + + return deserializedBinarySink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java index 7b0a5b075698..26946eb81f63 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BinarySource.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Binary source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BinarySource.class, visible = true) -@JsonTypeName("BinarySource") @Fluent public final class BinarySource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BinarySource"; /* * Binary store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Binary format settings. 
*/ - @JsonProperty(value = "formatSettings") private BinaryReadSettings formatSettings; /** @@ -143,4 +139,69 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BinarySource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BinarySource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the BinarySource. 
+ */ + public static BinarySource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BinarySource deserializedBinarySource = new BinarySource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedBinarySource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedBinarySource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedBinarySource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedBinarySource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedBinarySource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedBinarySource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedBinarySource.formatSettings = BinaryReadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBinarySource.withAdditionalProperties(additionalProperties); + + return deserializedBinarySource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventTypes.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventTypes.java index 0309c4ec6982..830303441d06 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventTypes.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventTypes.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public BlobEventTypes() { * @param name a name to look for. * @return the corresponding BlobEventTypes. */ - @JsonCreator public static BlobEventTypes fromString(String name) { return fromString(name, BlobEventTypes.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java index 8915d6425999..f1bb5c49b7ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobEventsTrigger.java @@ -6,33 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.BlobEventsTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that runs every time a Blob event occurs. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobEventsTrigger.class, visible = true) -@JsonTypeName("BlobEventsTrigger") @Fluent public final class BlobEventsTrigger extends MultiplePipelineTrigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BlobEventsTrigger"; /* * Blob Events Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private BlobEventsTriggerTypeProperties innerTypeProperties = new BlobEventsTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of BlobEventsTrigger class. */ @@ -58,6 +60,17 @@ private BlobEventsTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -226,4 +239,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BlobEventsTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("pipelines", pipelines(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobEventsTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobEventsTrigger if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BlobEventsTrigger. 
+ */ + public static BlobEventsTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobEventsTrigger deserializedBlobEventsTrigger = new BlobEventsTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedBlobEventsTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedBlobEventsTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedBlobEventsTrigger.withAnnotations(annotations); + } else if ("pipelines".equals(fieldName)) { + List pipelines + = reader.readArray(reader1 -> TriggerPipelineReference.fromJson(reader1)); + deserializedBlobEventsTrigger.withPipelines(pipelines); + } else if ("typeProperties".equals(fieldName)) { + deserializedBlobEventsTrigger.innerTypeProperties + = BlobEventsTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedBlobEventsTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBlobEventsTrigger.withAdditionalProperties(additionalProperties); + + return deserializedBlobEventsTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java index 8c486c9a6c53..ae0f5cdc84c9 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSink.java @@ -5,55 +5,48 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * A copy activity Azure Blob sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobSink.class, visible = true) -@JsonTypeName("BlobSink") @Fluent public final class BlobSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BlobSink"; /* * Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "blobWriterOverwriteFiles") private Object blobWriterOverwriteFiles; /* * Blob writer date time format. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "blobWriterDateTimeFormat") private Object blobWriterDateTimeFormat; /* * Blob writer add header. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "blobWriterAddHeader") private Object blobWriterAddHeader; /* * The type of copy behavior for copy sink. */ - @JsonProperty(value = "copyBehavior") private Object copyBehavior; /* * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array * of objects). 
*/ - @JsonProperty(value = "metadata") private List metadata; /** @@ -246,4 +239,85 @@ public void validate() { metadata().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("blobWriterOverwriteFiles", this.blobWriterOverwriteFiles); + jsonWriter.writeUntypedField("blobWriterDateTimeFormat", this.blobWriterDateTimeFormat); + jsonWriter.writeUntypedField("blobWriterAddHeader", this.blobWriterAddHeader); + jsonWriter.writeUntypedField("copyBehavior", this.copyBehavior); + jsonWriter.writeArrayField("metadata", this.metadata, (writer, element) -> writer.writeJson(element)); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the BlobSink. 
+ */ + public static BlobSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobSink deserializedBlobSink = new BlobSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedBlobSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedBlobSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedBlobSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedBlobSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedBlobSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedBlobSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedBlobSink.type = reader.getString(); + } else if ("blobWriterOverwriteFiles".equals(fieldName)) { + deserializedBlobSink.blobWriterOverwriteFiles = reader.readUntyped(); + } else if ("blobWriterDateTimeFormat".equals(fieldName)) { + deserializedBlobSink.blobWriterDateTimeFormat = reader.readUntyped(); + } else if ("blobWriterAddHeader".equals(fieldName)) { + deserializedBlobSink.blobWriterAddHeader = reader.readUntyped(); + } else if ("copyBehavior".equals(fieldName)) { + deserializedBlobSink.copyBehavior = reader.readUntyped(); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedBlobSink.metadata = metadata; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedBlobSink.withAdditionalProperties(additionalProperties); + + return deserializedBlobSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java index 8c2fa2ea511f..cb2e66a15d99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure Blob source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobSource.class, visible = true) -@JsonTypeName("BlobSource") @Fluent public final class BlobSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BlobSource"; /* * Treat empty as null. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "treatEmptyAsNull") private Object treatEmptyAsNull; /* * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "skipHeaderLineCount") private Object skipHeaderLineCount; /* * If true, files under the folder path will be read recursively. 
Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /** @@ -168,4 +163,72 @@ public BlobSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("treatEmptyAsNull", this.treatEmptyAsNull); + jsonWriter.writeUntypedField("skipHeaderLineCount", this.skipHeaderLineCount); + jsonWriter.writeUntypedField("recursive", this.recursive); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the BlobSource. 
+ */ + public static BlobSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobSource deserializedBlobSource = new BlobSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedBlobSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedBlobSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedBlobSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedBlobSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedBlobSource.type = reader.getString(); + } else if ("treatEmptyAsNull".equals(fieldName)) { + deserializedBlobSource.treatEmptyAsNull = reader.readUntyped(); + } else if ("skipHeaderLineCount".equals(fieldName)) { + deserializedBlobSource.skipHeaderLineCount = reader.readUntyped(); + } else if ("recursive".equals(fieldName)) { + deserializedBlobSource.recursive = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBlobSource.withAdditionalProperties(additionalProperties); + + return deserializedBlobSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java index 7ce43eeb279f..b009180bd5fb 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BlobTrigger.java @@ -6,33 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.BlobTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that runs every time the selected Blob container changes. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BlobTrigger.class, visible = true) -@JsonTypeName("BlobTrigger") @Fluent public final class BlobTrigger extends MultiplePipelineTrigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "BlobTrigger"; /* * Blob Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private BlobTriggerTypeProperties innerTypeProperties = new BlobTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of BlobTrigger class. */ @@ -58,6 +60,17 @@ private BlobTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -172,4 +185,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(BlobTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("pipelines", pipelines(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of BlobTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of BlobTrigger if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the BlobTrigger. 
+ */ + public static BlobTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + BlobTrigger deserializedBlobTrigger = new BlobTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedBlobTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedBlobTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedBlobTrigger.withAnnotations(annotations); + } else if ("pipelines".equals(fieldName)) { + List pipelines + = reader.readArray(reader1 -> TriggerPipelineReference.fromJson(reader1)); + deserializedBlobTrigger.withPipelines(pipelines); + } else if ("typeProperties".equals(fieldName)) { + deserializedBlobTrigger.innerTypeProperties = BlobTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedBlobTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedBlobTrigger.withAdditionalProperties(additionalProperties); + + return deserializedBlobTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java index d53990dfa146..16ce6d16f51d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CassandraLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Cassandra data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraLinkedService.class, visible = true) -@JsonTypeName("Cassandra") @Fluent public final class CassandraLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Cassandra"; /* * Cassandra linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private CassandraLinkedServiceTypeProperties innerTypeProperties = new CassandraLinkedServiceTypeProperties(); /** @@ -255,4 +251,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CassandraLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CassandraLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CassandraLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CassandraLinkedService. 
+ */ + public static CassandraLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CassandraLinkedService deserializedCassandraLinkedService = new CassandraLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCassandraLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCassandraLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCassandraLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCassandraLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCassandraLinkedService.innerTypeProperties + = CassandraLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCassandraLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCassandraLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCassandraLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java index 243210678e83..35566c058cd8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSource.java @@ -5,30 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for a Cassandra database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraSource.class, visible = true) -@JsonTypeName("CassandraSource") @Fluent public final class CassandraSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CassandraSource"; /* * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* @@ -37,7 +34,6 @@ public final class CassandraSource extends TabularSource { * read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is * case-insensitive. 
*/ - @JsonProperty(value = "consistencyLevel") private CassandraSourceReadConsistencyLevels consistencyLevel; /** @@ -167,4 +163,77 @@ public CassandraSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeStringField("consistencyLevel", + this.consistencyLevel == null ? null : this.consistencyLevel.toString()); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CassandraSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CassandraSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the CassandraSource. 
+ */ + public static CassandraSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CassandraSource deserializedCassandraSource = new CassandraSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedCassandraSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCassandraSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCassandraSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCassandraSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedCassandraSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedCassandraSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCassandraSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedCassandraSource.query = reader.readUntyped(); + } else if ("consistencyLevel".equals(fieldName)) { + deserializedCassandraSource.consistencyLevel + = CassandraSourceReadConsistencyLevels.fromString(reader.getString()); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCassandraSource.withAdditionalProperties(additionalProperties); + + return deserializedCassandraSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSourceReadConsistencyLevels.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSourceReadConsistencyLevels.java index 5d46ca7dbda6..4b140014306a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSourceReadConsistencyLevels.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraSourceReadConsistencyLevels.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -80,7 +79,6 @@ public CassandraSourceReadConsistencyLevels() { * @param name a name to look for. * @return the corresponding CassandraSourceReadConsistencyLevels. */ - @JsonCreator public static CassandraSourceReadConsistencyLevels fromString(String name) { return fromString(name, CassandraSourceReadConsistencyLevels.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java index 9557011e2df8..68d99ff5d550 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CassandraTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CassandraTableDatasetTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Cassandra database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CassandraTableDataset.class, visible = true) -@JsonTypeName("CassandraTable") @Fluent public final class CassandraTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CassandraTable"; /* * Cassandra dataset properties. */ - @JsonProperty(value = "typeProperties") private CassandraTableDatasetTypeProperties innerTypeProperties; /** @@ -183,4 +179,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an 
instance of CassandraTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CassandraTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CassandraTableDataset. + */ + public static CassandraTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CassandraTableDataset deserializedCassandraTableDataset = new CassandraTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCassandraTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCassandraTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCassandraTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCassandraTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCassandraTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCassandraTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCassandraTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedCassandraTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + 
deserializedCassandraTableDataset.innerTypeProperties + = CassandraTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCassandraTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedCassandraTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java index ccc70960c81c..e9642f432766 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChainingTrigger.java @@ -6,12 +6,14 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ChainingTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. @@ -19,29 +21,28 @@ * runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all * upstream pipeline runs. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ChainingTrigger.class, visible = true) -@JsonTypeName("ChainingTrigger") @Fluent public final class ChainingTrigger extends Trigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ChainingTrigger"; /* * Pipeline for which runs are created when all upstream pipelines complete successfully. */ - @JsonProperty(value = "pipeline", required = true) private TriggerPipelineReference pipeline; /* * Chaining Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private ChainingTriggerTypeProperties innerTypeProperties = new ChainingTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of ChainingTrigger class. */ @@ -87,6 +88,17 @@ private ChainingTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -175,4 +187,67 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ChainingTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("pipeline", this.pipeline); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChainingTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChainingTrigger if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ChainingTrigger. 
+ */ + public static ChainingTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChainingTrigger deserializedChainingTrigger = new ChainingTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedChainingTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedChainingTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedChainingTrigger.withAnnotations(annotations); + } else if ("pipeline".equals(fieldName)) { + deserializedChainingTrigger.pipeline = TriggerPipelineReference.fromJson(reader); + } else if ("typeProperties".equals(fieldName)) { + deserializedChainingTrigger.innerTypeProperties = ChainingTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedChainingTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedChainingTrigger.withAdditionalProperties(additionalProperties); + + return deserializedChainingTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureFolder.java index 214236e556bf..84c6bd42311c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureFolder.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureFolder.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The folder that this CDC is in. If not specified, CDC will appear at the root level. */ @Fluent -public final class ChangeDataCaptureFolder { +public final class ChangeDataCaptureFolder implements JsonSerializable { /* * The name of the folder that this CDC is in. */ - @JsonProperty(value = "name") private String name; /** @@ -51,4 +54,40 @@ public ChangeDataCaptureFolder withName(String name) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChangeDataCaptureFolder from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChangeDataCaptureFolder if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ChangeDataCaptureFolder. 
+ */ + public static ChangeDataCaptureFolder fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChangeDataCaptureFolder deserializedChangeDataCaptureFolder = new ChangeDataCaptureFolder(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedChangeDataCaptureFolder.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedChangeDataCaptureFolder; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java index 199ed7b0b651..88028239b49c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ChangeDataCaptureListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCaptureResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of change data capture resources. */ @Fluent -public final class ChangeDataCaptureListResponse { +public final class ChangeDataCaptureListResponse implements JsonSerializable { /* * Lists all resources of type change data capture. 
*/ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ChangeDataCaptureListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ChangeDataCaptureListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ChangeDataCaptureListResponse if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ChangeDataCaptureListResponse. 
+ */ + public static ChangeDataCaptureListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ChangeDataCaptureListResponse deserializedChangeDataCaptureListResponse + = new ChangeDataCaptureListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> ChangeDataCaptureResourceInner.fromJson(reader1)); + deserializedChangeDataCaptureListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedChangeDataCaptureListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedChangeDataCaptureListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java index 5a0965381e5b..7bc36097c229 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmdkeySetup.java @@ -6,30 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CmdkeySetupTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; /** * The custom setup of running cmdkey commands. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CmdkeySetup.class, visible = true) -@JsonTypeName("CmdkeySetup") @Fluent public final class CmdkeySetup extends CustomSetupBase { /* * The type of custom setup. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CmdkeySetup"; /* * Cmdkey command custom setup type properties. */ - @JsonProperty(value = "typeProperties", required = true) private CmdkeySetupTypeProperties innerTypeProperties = new CmdkeySetupTypeProperties(); /** @@ -144,4 +139,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CmdkeySetup.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CmdkeySetup from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CmdkeySetup if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CmdkeySetup. 
+ */ + public static CmdkeySetup fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CmdkeySetup deserializedCmdkeySetup = new CmdkeySetup(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("typeProperties".equals(fieldName)) { + deserializedCmdkeySetup.innerTypeProperties = CmdkeySetupTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCmdkeySetup.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCmdkeySetup; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmkIdentityDefinition.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmkIdentityDefinition.java index 881bd0b6aa4f..d1e01e83e6a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmkIdentityDefinition.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CmkIdentityDefinition.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Managed Identity used for CMK. */ @Fluent -public final class CmkIdentityDefinition { +public final class CmkIdentityDefinition implements JsonSerializable { /* * The resource id of the user assigned identity to authenticate to customer's key vault. 
*/ - @JsonProperty(value = "userAssignedIdentity") private String userAssignedIdentity; /** @@ -53,4 +56,40 @@ public CmkIdentityDefinition withUserAssignedIdentity(String userAssignedIdentit */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("userAssignedIdentity", this.userAssignedIdentity); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CmkIdentityDefinition from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CmkIdentityDefinition if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the CmkIdentityDefinition. + */ + public static CmkIdentityDefinition fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CmkIdentityDefinition deserializedCmkIdentityDefinition = new CmkIdentityDefinition(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("userAssignedIdentity".equals(fieldName)) { + deserializedCmkIdentityDefinition.userAssignedIdentity = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCmkIdentityDefinition; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java index 10e405b40a90..f9dae7b68f46 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsEntityDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsEntityDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Common Data Service for Apps entity dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CommonDataServiceForAppsEntityDataset.class, - visible = true) -@JsonTypeName("CommonDataServiceForAppsEntity") @Fluent public final class CommonDataServiceForAppsEntityDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CommonDataServiceForAppsEntity"; /* * Common Data Service for Apps entity dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private CommonDataServiceForAppsEntityDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsEntityDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CommonDataServiceForAppsEntityDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsEntityDataset. 
+ */ + public static CommonDataServiceForAppsEntityDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsEntityDataset deserializedCommonDataServiceForAppsEntityDataset + = new CommonDataServiceForAppsEntityDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCommonDataServiceForAppsEntityDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCommonDataServiceForAppsEntityDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedCommonDataServiceForAppsEntityDataset.innerTypeProperties + = CommonDataServiceForAppsEntityDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCommonDataServiceForAppsEntityDataset.withAdditionalProperties(additionalProperties); + + return deserializedCommonDataServiceForAppsEntityDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java index 20c3122cb5bb..87a99fe9dc93 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Common Data Service for Apps linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CommonDataServiceForAppsLinkedService.class, - visible = true) -@JsonTypeName("CommonDataServiceForApps") @Fluent public final class CommonDataServiceForAppsLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CommonDataServiceForApps"; /* * Common Data Service for Apps linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private CommonDataServiceForAppsLinkedServiceTypeProperties innerTypeProperties = new CommonDataServiceForAppsLinkedServiceTypeProperties(); @@ -234,7 +226,8 @@ public CommonDataServiceForAppsLinkedService withOrganizationName(Object organiz /** * Get the authenticationType property: The authentication type to connect to Common Data Service for Apps server. * 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for - * Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + * Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: + * string (or Expression with resultType string). * * @return the authenticationType value. */ @@ -245,7 +238,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Common Data Service for Apps server. * 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for - * Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + * Server-To-Server authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: + * string (or Expression with resultType string). * * @param authenticationType the authenticationType value to set. * @return the CommonDataServiceForAppsLinkedService object itself. @@ -258,6 +252,31 @@ public CommonDataServiceForAppsLinkedService withAuthenticationType(Object authe return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). 
+ * + * @return the domain value. + */ + public Object domain() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().domain(); + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @param domain the domain value to set. + * @return the CommonDataServiceForAppsLinkedService object itself. + */ + public CommonDataServiceForAppsLinkedService withDomain(Object domain) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new CommonDataServiceForAppsLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withDomain(domain); + return this; + } + /** * Get the username property: User name to access the Common Data Service for Apps instance. Type: string (or * Expression with resultType string). @@ -431,4 +450,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CommonDataServiceForAppsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of CommonDataServiceForAppsLinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsLinkedService. + */ + public static CommonDataServiceForAppsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsLinkedService deserializedCommonDataServiceForAppsLinkedService + = new CommonDataServiceForAppsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCommonDataServiceForAppsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCommonDataServiceForAppsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedService.innerTypeProperties + = CommonDataServiceForAppsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCommonDataServiceForAppsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); 
+ } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCommonDataServiceForAppsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCommonDataServiceForAppsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java index 720e4b8f7fa8..f9bf70fabe3d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSink.java @@ -6,47 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Common Data Service for Apps sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CommonDataServiceForAppsSink.class, - visible = true) -@JsonTypeName("CommonDataServiceForAppsSink") @Fluent public final class CommonDataServiceForAppsSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CommonDataServiceForAppsSink"; /* * The write behavior for the operation. 
*/ - @JsonProperty(value = "writeBehavior", required = true) private DynamicsSinkWriteBehavior writeBehavior; /* * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. * Default is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; /** @@ -199,4 +190,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CommonDataServiceForAppsSink.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? 
null : this.writeBehavior.toString()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + jsonWriter.writeUntypedField("alternateKeyName", this.alternateKeyName); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CommonDataServiceForAppsSink if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsSink. + */ + public static CommonDataServiceForAppsSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsSink deserializedCommonDataServiceForAppsSink = new CommonDataServiceForAppsSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + 
deserializedCommonDataServiceForAppsSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("writeBehavior".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.writeBehavior + = DynamicsSinkWriteBehavior.fromString(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.type = reader.getString(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.ignoreNullValues = reader.readUntyped(); + } else if ("alternateKeyName".equals(fieldName)) { + deserializedCommonDataServiceForAppsSink.alternateKeyName = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCommonDataServiceForAppsSink.withAdditionalProperties(additionalProperties); + + return deserializedCommonDataServiceForAppsSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java index b443224d96eb..63e7f7a98e9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CommonDataServiceForAppsSource.java @@ -5,41 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; 
-import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Common Data Service for Apps source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CommonDataServiceForAppsSource.class, - visible = true) -@JsonTypeName("CommonDataServiceForAppsSource") @Fluent public final class CommonDataServiceForAppsSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CommonDataServiceForAppsSource"; /* * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & * on-premises). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -147,4 +139,70 @@ public CommonDataServiceForAppsSource withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CommonDataServiceForAppsSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CommonDataServiceForAppsSource if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CommonDataServiceForAppsSource. 
+ */ + public static CommonDataServiceForAppsSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CommonDataServiceForAppsSource deserializedCommonDataServiceForAppsSource + = new CommonDataServiceForAppsSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedCommonDataServiceForAppsSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCommonDataServiceForAppsSource.withAdditionalProperties(additionalProperties); + + return deserializedCommonDataServiceForAppsSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java index 
0c37debc2832..754e8a58ce31 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ComponentSetup.java @@ -6,30 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.LicensedComponentSetupTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; /** * The custom setup of installing 3rd party components. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ComponentSetup.class, visible = true) -@JsonTypeName("ComponentSetup") @Fluent public final class ComponentSetup extends CustomSetupBase { /* * The type of custom setup. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ComponentSetup"; /* * Install 3rd party component type properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private LicensedComponentSetupTypeProperties innerTypeProperties = new LicensedComponentSetupTypeProperties(); /** @@ -121,4 +116,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ComponentSetup.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ComponentSetup from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ComponentSetup if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ComponentSetup. 
+ */ + public static ComponentSetup fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ComponentSetup deserializedComponentSetup = new ComponentSetup(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("typeProperties".equals(fieldName)) { + deserializedComponentSetup.innerTypeProperties + = LicensedComponentSetupTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedComponentSetup.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedComponentSetup; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java index f774bbaae127..a6c40f8b55ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CompressionReadSettings.java @@ -5,43 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import 
java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Compression read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CompressionReadSettings.class, - visible = true) -@JsonTypeName("CompressionReadSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "ZipDeflateReadSettings", value = ZipDeflateReadSettings.class), - @JsonSubTypes.Type(name = "TarReadSettings", value = TarReadSettings.class), - @JsonSubTypes.Type(name = "TarGZipReadSettings", value = TarGZipReadSettings.class) }) @Fluent -public class CompressionReadSettings { +public class CompressionReadSettings implements JsonSerializable { /* * The Compression setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CompressionReadSettings"; /* * Compression read settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -64,7 +48,6 @@ public String type() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -80,14 +63,6 @@ public CompressionReadSettings withAdditionalProperties(Map addi return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -95,4 +70,80 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CompressionReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CompressionReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the CompressionReadSettings. + */ + public static CompressionReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("ZipDeflateReadSettings".equals(discriminatorValue)) { + return ZipDeflateReadSettings.fromJson(readerToUse.reset()); + } else if ("TarReadSettings".equals(discriminatorValue)) { + return TarReadSettings.fromJson(readerToUse.reset()); + } else if ("TarGZipReadSettings".equals(discriminatorValue)) { + return TarGZipReadSettings.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static CompressionReadSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CompressionReadSettings deserializedCompressionReadSettings = new CompressionReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCompressionReadSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCompressionReadSettings.additionalProperties = additionalProperties; + + return deserializedCompressionReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java index d46ebf04d0b4..1a187aff2ea4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ConcurLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Concur Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurLinkedService.class, visible = true) -@JsonTypeName("Concur") @Fluent public final class ConcurLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Concur"; /* * Concur Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ConcurLinkedServiceTypeProperties innerTypeProperties = new ConcurLinkedServiceTypeProperties(); /** @@ -307,4 +303,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ConcurLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConcurLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConcurLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ConcurLinkedService. + */ + public static ConcurLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConcurLinkedService deserializedConcurLinkedService = new ConcurLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedConcurLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedConcurLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedConcurLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedConcurLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedConcurLinkedService.innerTypeProperties + = ConcurLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedConcurLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedConcurLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedConcurLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java index 2d02aa40d819..90e7c4eeab85 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Concur Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurObjectDataset.class, visible = true) -@JsonTypeName("ConcurObject") @Fluent public final class ConcurObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ConcurObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConcurObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConcurObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ConcurObjectDataset. 
+ */ + public static ConcurObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConcurObjectDataset deserializedConcurObjectDataset = new ConcurObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedConcurObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedConcurObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedConcurObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedConcurObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedConcurObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedConcurObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedConcurObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedConcurObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedConcurObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedConcurObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedConcurObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java index c2b6d0de8c94..0cf89b089f7b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConcurSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Concur Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ConcurSource.class, visible = true) -@JsonTypeName("ConcurSource") @Fluent public final class ConcurSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ConcurSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ConcurSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConcurSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConcurSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ConcurSource. 
+ */ + public static ConcurSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConcurSource deserializedConcurSource = new ConcurSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedConcurSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedConcurSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedConcurSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedConcurSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedConcurSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedConcurSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedConcurSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedConcurSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedConcurSource.withAdditionalProperties(additionalProperties); + + return deserializedConcurSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java index ac71f5ce70c0..4d6a085251b7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConfigurationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public ConfigurationType() { * @param name a name to look for. * @return the corresponding ConfigurationType. */ - @JsonCreator public static ConfigurationType fromString(String name) { return fromString(name, ConfigurationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionStateProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionStateProperties.java index 0393c1c14b74..3cdfd27b8c9f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionStateProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionStateProperties.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The connection state of a managed private endpoint. 
*/ @Immutable -public final class ConnectionStateProperties { +public final class ConnectionStateProperties implements JsonSerializable { /* * The actions required on the managed private endpoint */ - @JsonProperty(value = "actionsRequired", access = JsonProperty.Access.WRITE_ONLY) private String actionsRequired; /* * The managed private endpoint description */ - @JsonProperty(value = "description", access = JsonProperty.Access.WRITE_ONLY) private String description; /* * The approval status */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private String status; /** @@ -70,4 +71,43 @@ public String status() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConnectionStateProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConnectionStateProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ConnectionStateProperties. 
+ */ + public static ConnectionStateProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConnectionStateProperties deserializedConnectionStateProperties = new ConnectionStateProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("actionsRequired".equals(fieldName)) { + deserializedConnectionStateProperties.actionsRequired = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedConnectionStateProperties.description = reader.getString(); + } else if ("status".equals(fieldName)) { + deserializedConnectionStateProperties.status = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedConnectionStateProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionType.java index e9ea8e06e559..1b4fa0d21634 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ConnectionType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public ConnectionType() { * @param name a name to look for. * @return the corresponding ConnectionType. 
*/ - @JsonCreator public static ConnectionType fromString(String name) { return fromString(name, ConnectionType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ContinuationSettingsReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ContinuationSettingsReference.java index 5c02a6a69446..e5befe36cad5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ContinuationSettingsReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ContinuationSettingsReference.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Continuation settings for execute data flow activity. */ @Fluent -public final class ContinuationSettingsReference { +public final class ContinuationSettingsReference implements JsonSerializable { /* * Continuation TTL in minutes. */ - @JsonProperty(value = "continuationTtlInMinutes") private Object continuationTtlInMinutes; /* * Idle condition. */ - @JsonProperty(value = "idleCondition") private Object idleCondition; /* * Customized checkpoint key. 
*/ - @JsonProperty(value = "customizedCheckpointKey") private Object customizedCheckpointKey; /** @@ -103,4 +104,47 @@ public ContinuationSettingsReference withCustomizedCheckpointKey(Object customiz */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("continuationTtlInMinutes", this.continuationTtlInMinutes); + jsonWriter.writeUntypedField("idleCondition", this.idleCondition); + jsonWriter.writeUntypedField("customizedCheckpointKey", this.customizedCheckpointKey); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ContinuationSettingsReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ContinuationSettingsReference if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ContinuationSettingsReference. 
+ */ + public static ContinuationSettingsReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ContinuationSettingsReference deserializedContinuationSettingsReference + = new ContinuationSettingsReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("continuationTtlInMinutes".equals(fieldName)) { + deserializedContinuationSettingsReference.continuationTtlInMinutes = reader.readUntyped(); + } else if ("idleCondition".equals(fieldName)) { + deserializedContinuationSettingsReference.idleCondition = reader.readUntyped(); + } else if ("customizedCheckpointKey".equals(fieldName)) { + deserializedContinuationSettingsReference.customizedCheckpointKey = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedContinuationSettingsReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java index 325e9f073665..b9903066b4a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ControlActivity.java @@ -5,38 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Base class for all control activities like IfCondition, ForEach , Until. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ControlActivity.class, visible = true) -@JsonTypeName("Container") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "ExecutePipeline", value = ExecutePipelineActivity.class), - @JsonSubTypes.Type(name = "IfCondition", value = IfConditionActivity.class), - @JsonSubTypes.Type(name = "Switch", value = SwitchActivity.class), - @JsonSubTypes.Type(name = "ForEach", value = ForEachActivity.class), - @JsonSubTypes.Type(name = "Wait", value = WaitActivity.class), - @JsonSubTypes.Type(name = "Fail", value = FailActivity.class), - @JsonSubTypes.Type(name = "Until", value = UntilActivity.class), - @JsonSubTypes.Type(name = "Validation", value = ValidationActivity.class), - @JsonSubTypes.Type(name = "Filter", value = FilterActivity.class), - @JsonSubTypes.Type(name = "SetVariable", value = SetVariableActivity.class), - @JsonSubTypes.Type(name = "AppendVariable", value = AppendVariableActivity.class), - @JsonSubTypes.Type(name = "WebHook", value = WebhookActivity.class) }) @Fluent public class ControlActivity extends Activity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Container"; /** @@ -118,4 +102,122 @@ public ControlActivity withUserProperties(List userProperties) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ControlActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ControlActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ControlActivity. + */ + public static ControlActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("ExecutePipeline".equals(discriminatorValue)) { + return ExecutePipelineActivity.fromJson(readerToUse.reset()); + } else if ("IfCondition".equals(discriminatorValue)) { + return IfConditionActivity.fromJson(readerToUse.reset()); + } else if ("Switch".equals(discriminatorValue)) { + return SwitchActivity.fromJson(readerToUse.reset()); + } else if ("ForEach".equals(discriminatorValue)) { + return ForEachActivity.fromJson(readerToUse.reset()); + } else if ("Wait".equals(discriminatorValue)) { + return WaitActivity.fromJson(readerToUse.reset()); + } else if ("Fail".equals(discriminatorValue)) { + return FailActivity.fromJson(readerToUse.reset()); + } else if ("Until".equals(discriminatorValue)) { + return UntilActivity.fromJson(readerToUse.reset()); + } else if ("Validation".equals(discriminatorValue)) { + return ValidationActivity.fromJson(readerToUse.reset()); + } else if ("Filter".equals(discriminatorValue)) { + return FilterActivity.fromJson(readerToUse.reset()); + } else if ("SetVariable".equals(discriminatorValue)) { + return SetVariableActivity.fromJson(readerToUse.reset()); + } else if ("AppendVariable".equals(discriminatorValue)) { + return AppendVariableActivity.fromJson(readerToUse.reset()); + } else if ("WebHook".equals(discriminatorValue)) { + return WebhookActivity.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static ControlActivity fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ControlActivity deserializedControlActivity = new ControlActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedControlActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedControlActivity.withDescription(reader.getString()); 
+ } else if ("state".equals(fieldName)) { + deserializedControlActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedControlActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedControlActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedControlActivity.withUserProperties(userProperties); + } else if ("type".equals(fieldName)) { + deserializedControlActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedControlActivity.withAdditionalProperties(additionalProperties); + + return deserializedControlActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java index 3c0293733996..14e242a7406d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivity.java @@ -6,43 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CopyActivityTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Copy activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopyActivity.class, visible = true) -@JsonTypeName("Copy") @Fluent public final class CopyActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Copy"; /* * Copy activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private CopyActivityTypeProperties innerTypeProperties = new CopyActivityTypeProperties(); /* * List of inputs for the activity. */ - @JsonProperty(value = "inputs") private List inputs; /* * List of outputs for the activity. */ - @JsonProperty(value = "outputs") private List outputs; /** @@ -566,4 +561,92 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CopyActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeArrayField("inputs", this.inputs, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("outputs", this.outputs, (writer, element) -> writer.writeJson(element)); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopyActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopyActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CopyActivity. 
+ */ + public static CopyActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopyActivity deserializedCopyActivity = new CopyActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedCopyActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedCopyActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedCopyActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedCopyActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedCopyActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedCopyActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedCopyActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedCopyActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedCopyActivity.innerTypeProperties = CopyActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCopyActivity.type = reader.getString(); + } else if ("inputs".equals(fieldName)) { + List inputs = reader.readArray(reader1 -> DatasetReference.fromJson(reader1)); + deserializedCopyActivity.inputs = inputs; + } else if ("outputs".equals(fieldName)) { + List outputs = reader.readArray(reader1 -> 
DatasetReference.fromJson(reader1)); + deserializedCopyActivity.outputs = outputs; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCopyActivity.withAdditionalProperties(additionalProperties); + + return deserializedCopyActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivityLogSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivityLogSettings.java index 051e6965900d..b2efb95e1d73 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivityLogSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyActivityLogSettings.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Settings for copy activity log. */ @Fluent -public final class CopyActivityLogSettings { +public final class CopyActivityLogSettings implements JsonSerializable { /* * Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logLevel") private Object logLevel; /* * Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "enableReliableLogging") private Object enableReliableLogging; /** @@ -81,4 +83,43 @@ public CopyActivityLogSettings withEnableReliableLogging(Object enableReliableLo */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("logLevel", this.logLevel); + jsonWriter.writeUntypedField("enableReliableLogging", this.enableReliableLogging); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopyActivityLogSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopyActivityLogSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the CopyActivityLogSettings. + */ + public static CopyActivityLogSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopyActivityLogSettings deserializedCopyActivityLogSettings = new CopyActivityLogSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("logLevel".equals(fieldName)) { + deserializedCopyActivityLogSettings.logLevel = reader.readUntyped(); + } else if ("enableReliableLogging".equals(fieldName)) { + deserializedCopyActivityLogSettings.enableReliableLogging = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedCopyActivityLogSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyComputeScaleProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyComputeScaleProperties.java index 6d9e00aa7c12..a232edf0a289 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyComputeScaleProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyComputeScaleProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,23 +17,20 @@ * CopyComputeScale properties for managed integration runtime. */ @Fluent -public final class CopyComputeScaleProperties { +public final class CopyComputeScaleProperties implements JsonSerializable { /* * DIU number setting reserved for copy activity execution. Supported values are multiples of 4 in range 4-256. */ - @JsonProperty(value = "dataIntegrationUnit") private Integer dataIntegrationUnit; /* * Time to live (in minutes) setting of integration runtime which will execute copy activity. */ - @JsonProperty(value = "timeToLive") private Integer timeToLive; /* * CopyComputeScale properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -90,7 +88,6 @@ public CopyComputeScaleProperties withTimeToLive(Integer timeToLive) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -106,14 +103,6 @@ public CopyComputeScaleProperties withAdditionalProperties(Map a return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -121,4 +110,54 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("dataIntegrationUnit", this.dataIntegrationUnit); + jsonWriter.writeNumberField("timeToLive", this.timeToLive); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopyComputeScaleProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopyComputeScaleProperties if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CopyComputeScaleProperties. 
+ */ + public static CopyComputeScaleProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopyComputeScaleProperties deserializedCopyComputeScaleProperties = new CopyComputeScaleProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataIntegrationUnit".equals(fieldName)) { + deserializedCopyComputeScaleProperties.dataIntegrationUnit = reader.getNullable(JsonReader::getInt); + } else if ("timeToLive".equals(fieldName)) { + deserializedCopyComputeScaleProperties.timeToLive = reader.getNullable(JsonReader::getInt); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCopyComputeScaleProperties.additionalProperties = additionalProperties; + + return deserializedCopyComputeScaleProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java index f3ad410b0a6e..b8205018c528 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySink.java @@ -5,120 +5,61 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * A copy activity sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopySink.class, visible = true) -@JsonTypeName("CopySink") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "DelimitedTextSink", value = DelimitedTextSink.class), - @JsonSubTypes.Type(name = "JsonSink", value = JsonSink.class), - @JsonSubTypes.Type(name = "OrcSink", value = OrcSink.class), - @JsonSubTypes.Type(name = "RestSink", value = RestSink.class), - @JsonSubTypes.Type(name = "AzurePostgreSqlSink", value = AzurePostgreSqlSink.class), - @JsonSubTypes.Type(name = "AzureMySqlSink", value = AzureMySqlSink.class), - @JsonSubTypes.Type(name = "AzureDatabricksDeltaLakeSink", value = AzureDatabricksDeltaLakeSink.class), - @JsonSubTypes.Type(name = "WarehouseSink", value = WarehouseSink.class), - @JsonSubTypes.Type(name = "SapCloudForCustomerSink", value = SapCloudForCustomerSink.class), - @JsonSubTypes.Type(name = "AzureQueueSink", value = AzureQueueSink.class), - @JsonSubTypes.Type(name = "AzureTableSink", value = AzureTableSink.class), - @JsonSubTypes.Type(name = "AvroSink", value = AvroSink.class), - @JsonSubTypes.Type(name = "ParquetSink", value = ParquetSink.class), - @JsonSubTypes.Type(name = "BinarySink", value = BinarySink.class), - @JsonSubTypes.Type(name = "BlobSink", value = BlobSink.class), - @JsonSubTypes.Type(name = "FileSystemSink", value = FileSystemSink.class), - @JsonSubTypes.Type(name = "DocumentDbCollectionSink", value = DocumentDbCollectionSink.class), - @JsonSubTypes.Type(name = "CosmosDbSqlApiSink", value = CosmosDbSqlApiSink.class), - @JsonSubTypes.Type(name = 
"SqlSink", value = SqlSink.class), - @JsonSubTypes.Type(name = "SqlServerSink", value = SqlServerSink.class), - @JsonSubTypes.Type(name = "AzureSqlSink", value = AzureSqlSink.class), - @JsonSubTypes.Type(name = "SqlMISink", value = SqlMISink.class), - @JsonSubTypes.Type(name = "SqlDWSink", value = SqlDWSink.class), - @JsonSubTypes.Type(name = "SnowflakeSink", value = SnowflakeSink.class), - @JsonSubTypes.Type(name = "SnowflakeV2Sink", value = SnowflakeV2Sink.class), - @JsonSubTypes.Type(name = "OracleSink", value = OracleSink.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreSink", value = AzureDataLakeStoreSink.class), - @JsonSubTypes.Type(name = "AzureBlobFSSink", value = AzureBlobFSSink.class), - @JsonSubTypes.Type(name = "AzureSearchIndexSink", value = AzureSearchIndexSink.class), - @JsonSubTypes.Type(name = "OdbcSink", value = OdbcSink.class), - @JsonSubTypes.Type(name = "InformixSink", value = InformixSink.class), - @JsonSubTypes.Type(name = "MicrosoftAccessSink", value = MicrosoftAccessSink.class), - @JsonSubTypes.Type(name = "DynamicsSink", value = DynamicsSink.class), - @JsonSubTypes.Type(name = "DynamicsCrmSink", value = DynamicsCrmSink.class), - @JsonSubTypes.Type(name = "CommonDataServiceForAppsSink", value = CommonDataServiceForAppsSink.class), - @JsonSubTypes.Type(name = "AzureDataExplorerSink", value = AzureDataExplorerSink.class), - @JsonSubTypes.Type(name = "SalesforceSink", value = SalesforceSink.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloudSink", value = SalesforceServiceCloudSink.class), - @JsonSubTypes.Type(name = "MongoDbAtlasSink", value = MongoDbAtlasSink.class), - @JsonSubTypes.Type(name = "MongoDbV2Sink", value = MongoDbV2Sink.class), - @JsonSubTypes.Type(name = "CosmosDbMongoDbApiSink", value = CosmosDbMongoDbApiSink.class), - @JsonSubTypes.Type(name = "LakeHouseTableSink", value = LakeHouseTableSink.class), - @JsonSubTypes.Type(name = "SalesforceV2Sink", value = SalesforceV2Sink.class), - @JsonSubTypes.Type(name = 
"SalesforceServiceCloudV2Sink", value = SalesforceServiceCloudV2Sink.class) }) @Fluent -public class CopySink { +public class CopySink implements JsonSerializable { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CopySink"; /* * Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "writeBatchSize") private Object writeBatchSize; /* * Write batch timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "writeBatchTimeout") private Object writeBatchTimeout; /* * Sink retry count. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "sinkRetryCount") private Object sinkRetryCount; /* * Sink retry wait. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "sinkRetryWait") private Object sinkRetryWait; /* * The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType * integer). */ - @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; /* * A copy activity sink. */ - @JsonIgnore private Map additionalProperties; /** @@ -271,7 +212,6 @@ public CopySink withDisableMetricsCollection(Object disableMetricsCollection) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -287,14 +227,6 @@ public CopySink withAdditionalProperties(Map additionalPropertie return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -302,4 +234,180 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("writeBatchSize", this.writeBatchSize); + jsonWriter.writeUntypedField("writeBatchTimeout", this.writeBatchTimeout); + jsonWriter.writeUntypedField("sinkRetryCount", this.sinkRetryCount); + jsonWriter.writeUntypedField("sinkRetryWait", this.sinkRetryWait); + jsonWriter.writeUntypedField("maxConcurrentConnections", this.maxConcurrentConnections); + jsonWriter.writeUntypedField("disableMetricsCollection", this.disableMetricsCollection); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopySink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopySink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the CopySink. 
+ */ + public static CopySink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("DelimitedTextSink".equals(discriminatorValue)) { + return DelimitedTextSink.fromJson(readerToUse.reset()); + } else if ("JsonSink".equals(discriminatorValue)) { + return JsonSink.fromJson(readerToUse.reset()); + } else if ("OrcSink".equals(discriminatorValue)) { + return OrcSink.fromJson(readerToUse.reset()); + } else if ("RestSink".equals(discriminatorValue)) { + return RestSink.fromJson(readerToUse.reset()); + } else if ("AzurePostgreSqlSink".equals(discriminatorValue)) { + return AzurePostgreSqlSink.fromJson(readerToUse.reset()); + } else if ("AzureMySqlSink".equals(discriminatorValue)) { + return AzureMySqlSink.fromJson(readerToUse.reset()); + } else if ("AzureDatabricksDeltaLakeSink".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeSink.fromJson(readerToUse.reset()); + } else if ("WarehouseSink".equals(discriminatorValue)) { + return WarehouseSink.fromJson(readerToUse.reset()); + } else if ("SapCloudForCustomerSink".equals(discriminatorValue)) { + return SapCloudForCustomerSink.fromJson(readerToUse.reset()); + } else if ("AzureQueueSink".equals(discriminatorValue)) { + return AzureQueueSink.fromJson(readerToUse.reset()); + } else if ("AzureTableSink".equals(discriminatorValue)) { + return AzureTableSink.fromJson(readerToUse.reset()); + } else if ("AvroSink".equals(discriminatorValue)) { + return 
AvroSink.fromJson(readerToUse.reset()); + } else if ("ParquetSink".equals(discriminatorValue)) { + return ParquetSink.fromJson(readerToUse.reset()); + } else if ("BinarySink".equals(discriminatorValue)) { + return BinarySink.fromJson(readerToUse.reset()); + } else if ("BlobSink".equals(discriminatorValue)) { + return BlobSink.fromJson(readerToUse.reset()); + } else if ("FileSystemSink".equals(discriminatorValue)) { + return FileSystemSink.fromJson(readerToUse.reset()); + } else if ("DocumentDbCollectionSink".equals(discriminatorValue)) { + return DocumentDbCollectionSink.fromJson(readerToUse.reset()); + } else if ("CosmosDbSqlApiSink".equals(discriminatorValue)) { + return CosmosDbSqlApiSink.fromJson(readerToUse.reset()); + } else if ("SqlSink".equals(discriminatorValue)) { + return SqlSink.fromJson(readerToUse.reset()); + } else if ("SqlServerSink".equals(discriminatorValue)) { + return SqlServerSink.fromJson(readerToUse.reset()); + } else if ("AzureSqlSink".equals(discriminatorValue)) { + return AzureSqlSink.fromJson(readerToUse.reset()); + } else if ("SqlMISink".equals(discriminatorValue)) { + return SqlMISink.fromJson(readerToUse.reset()); + } else if ("SqlDWSink".equals(discriminatorValue)) { + return SqlDWSink.fromJson(readerToUse.reset()); + } else if ("SnowflakeSink".equals(discriminatorValue)) { + return SnowflakeSink.fromJson(readerToUse.reset()); + } else if ("SnowflakeV2Sink".equals(discriminatorValue)) { + return SnowflakeV2Sink.fromJson(readerToUse.reset()); + } else if ("OracleSink".equals(discriminatorValue)) { + return OracleSink.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreSink".equals(discriminatorValue)) { + return AzureDataLakeStoreSink.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSSink".equals(discriminatorValue)) { + return AzureBlobFSSink.fromJson(readerToUse.reset()); + } else if ("AzureSearchIndexSink".equals(discriminatorValue)) { + return AzureSearchIndexSink.fromJson(readerToUse.reset()); + } else if 
("OdbcSink".equals(discriminatorValue)) { + return OdbcSink.fromJson(readerToUse.reset()); + } else if ("InformixSink".equals(discriminatorValue)) { + return InformixSink.fromJson(readerToUse.reset()); + } else if ("MicrosoftAccessSink".equals(discriminatorValue)) { + return MicrosoftAccessSink.fromJson(readerToUse.reset()); + } else if ("DynamicsSink".equals(discriminatorValue)) { + return DynamicsSink.fromJson(readerToUse.reset()); + } else if ("DynamicsCrmSink".equals(discriminatorValue)) { + return DynamicsCrmSink.fromJson(readerToUse.reset()); + } else if ("CommonDataServiceForAppsSink".equals(discriminatorValue)) { + return CommonDataServiceForAppsSink.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorerSink".equals(discriminatorValue)) { + return AzureDataExplorerSink.fromJson(readerToUse.reset()); + } else if ("SalesforceSink".equals(discriminatorValue)) { + return SalesforceSink.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudSink".equals(discriminatorValue)) { + return SalesforceServiceCloudSink.fromJson(readerToUse.reset()); + } else if ("MongoDbAtlasSink".equals(discriminatorValue)) { + return MongoDbAtlasSink.fromJson(readerToUse.reset()); + } else if ("MongoDbV2Sink".equals(discriminatorValue)) { + return MongoDbV2Sink.fromJson(readerToUse.reset()); + } else if ("CosmosDbMongoDbApiSink".equals(discriminatorValue)) { + return CosmosDbMongoDbApiSink.fromJson(readerToUse.reset()); + } else if ("LakeHouseTableSink".equals(discriminatorValue)) { + return LakeHouseTableSink.fromJson(readerToUse.reset()); + } else if ("SalesforceV2Sink".equals(discriminatorValue)) { + return SalesforceV2Sink.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudV2Sink".equals(discriminatorValue)) { + return SalesforceServiceCloudV2Sink.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static CopySink fromJsonKnownDiscriminator(JsonReader jsonReader) throws 
IOException { + return jsonReader.readObject(reader -> { + CopySink deserializedCopySink = new CopySink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCopySink.type = reader.getString(); + } else if ("writeBatchSize".equals(fieldName)) { + deserializedCopySink.writeBatchSize = reader.readUntyped(); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedCopySink.writeBatchTimeout = reader.readUntyped(); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedCopySink.sinkRetryCount = reader.readUntyped(); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedCopySink.sinkRetryWait = reader.readUntyped(); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCopySink.maxConcurrentConnections = reader.readUntyped(); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCopySink.disableMetricsCollection = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCopySink.additionalProperties = additionalProperties; + + return deserializedCopySink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java index 016a1d5943de..1215dd0b3e3c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopySource.java @@ -5,103 +5,50 @@ package com.azure.resourcemanager.datafactory.models; 
import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * A copy activity source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopySource.class, visible = true) -@JsonTypeName("CopySource") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AvroSource", value = AvroSource.class), - @JsonSubTypes.Type(name = "ExcelSource", value = ExcelSource.class), - @JsonSubTypes.Type(name = "ParquetSource", value = ParquetSource.class), - @JsonSubTypes.Type(name = "DelimitedTextSource", value = DelimitedTextSource.class), - @JsonSubTypes.Type(name = "JsonSource", value = JsonSource.class), - @JsonSubTypes.Type(name = "XmlSource", value = XmlSource.class), - @JsonSubTypes.Type(name = "OrcSource", value = OrcSource.class), - @JsonSubTypes.Type(name = "BinarySource", value = BinarySource.class), - @JsonSubTypes.Type(name = "TabularSource", value = TabularSource.class), - @JsonSubTypes.Type(name = "BlobSource", value = BlobSource.class), - @JsonSubTypes.Type(name = "DocumentDbCollectionSource", value = DocumentDbCollectionSource.class), - @JsonSubTypes.Type(name = "CosmosDbSqlApiSource", value = CosmosDbSqlApiSource.class), - @JsonSubTypes.Type(name = "DynamicsSource", value = DynamicsSource.class), - @JsonSubTypes.Type(name = "DynamicsCrmSource", value = DynamicsCrmSource.class), - @JsonSubTypes.Type(name = 
"CommonDataServiceForAppsSource", value = CommonDataServiceForAppsSource.class), - @JsonSubTypes.Type(name = "RelationalSource", value = RelationalSource.class), - @JsonSubTypes.Type(name = "MicrosoftAccessSource", value = MicrosoftAccessSource.class), - @JsonSubTypes.Type(name = "ODataSource", value = ODataSource.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloudSource", value = SalesforceServiceCloudSource.class), - @JsonSubTypes.Type(name = "RestSource", value = RestSource.class), - @JsonSubTypes.Type(name = "FileSystemSource", value = FileSystemSource.class), - @JsonSubTypes.Type(name = "HdfsSource", value = HdfsSource.class), - @JsonSubTypes.Type(name = "AzureDataExplorerSource", value = AzureDataExplorerSource.class), - @JsonSubTypes.Type(name = "OracleSource", value = OracleSource.class), - @JsonSubTypes.Type(name = "AmazonRdsForOracleSource", value = AmazonRdsForOracleSource.class), - @JsonSubTypes.Type(name = "WebSource", value = WebSource.class), - @JsonSubTypes.Type(name = "MongoDbSource", value = MongoDbSource.class), - @JsonSubTypes.Type(name = "MongoDbAtlasSource", value = MongoDbAtlasSource.class), - @JsonSubTypes.Type(name = "MongoDbV2Source", value = MongoDbV2Source.class), - @JsonSubTypes.Type(name = "CosmosDbMongoDbApiSource", value = CosmosDbMongoDbApiSource.class), - @JsonSubTypes.Type(name = "Office365Source", value = Office365Source.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreSource", value = AzureDataLakeStoreSource.class), - @JsonSubTypes.Type(name = "AzureBlobFSSource", value = AzureBlobFSSource.class), - @JsonSubTypes.Type(name = "HttpSource", value = HttpSource.class), - @JsonSubTypes.Type(name = "LakeHouseTableSource", value = LakeHouseTableSource.class), - @JsonSubTypes.Type(name = "SnowflakeSource", value = SnowflakeSource.class), - @JsonSubTypes.Type(name = "SnowflakeV2Source", value = SnowflakeV2Source.class), - @JsonSubTypes.Type(name = "AzureDatabricksDeltaLakeSource", value = 
AzureDatabricksDeltaLakeSource.class), - @JsonSubTypes.Type(name = "SharePointOnlineListSource", value = SharePointOnlineListSource.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloudV2Source", value = SalesforceServiceCloudV2Source.class) }) @Fluent -public class CopySource { +public class CopySource implements JsonSerializable<CopySource> { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CopySource"; /* * Source retry count. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "sourceRetryCount") private Object sourceRetryCount; /* * Source retry wait. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "sourceRetryWait") private Object sourceRetryWait; /* * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType * integer). */ - @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; /* * A copy activity source. */ - @JsonIgnore private Map<String, Object> additionalProperties; /** @@ -210,7 +157,6 @@ public CopySource withDisableMetricsCollection(Object disableMetricsCollection) * * @return the additionalProperties value. */ - @JsonAnyGetter public Map<String, Object> additionalProperties() { return this.additionalProperties; } @@ -226,14 +172,6 @@ public CopySource withAdditionalProperties(Map additionalPropert return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -241,4 +179,294 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sourceRetryCount", this.sourceRetryCount); + jsonWriter.writeUntypedField("sourceRetryWait", this.sourceRetryWait); + jsonWriter.writeUntypedField("maxConcurrentConnections", this.maxConcurrentConnections); + jsonWriter.writeUntypedField("disableMetricsCollection", this.disableMetricsCollection); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopySource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopySource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the CopySource. + */ + public static CopySource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("AvroSource".equals(discriminatorValue)) { + return AvroSource.fromJson(readerToUse.reset()); + } else if ("ExcelSource".equals(discriminatorValue)) { + return ExcelSource.fromJson(readerToUse.reset()); + } else if ("ParquetSource".equals(discriminatorValue)) { + return ParquetSource.fromJson(readerToUse.reset()); + } else if ("DelimitedTextSource".equals(discriminatorValue)) { + return DelimitedTextSource.fromJson(readerToUse.reset()); + } else if ("JsonSource".equals(discriminatorValue)) { + return JsonSource.fromJson(readerToUse.reset()); + } else if ("XmlSource".equals(discriminatorValue)) { + return XmlSource.fromJson(readerToUse.reset()); + } else if ("OrcSource".equals(discriminatorValue)) { + return OrcSource.fromJson(readerToUse.reset()); + } else if ("BinarySource".equals(discriminatorValue)) { + return BinarySource.fromJson(readerToUse.reset()); + } else if ("TabularSource".equals(discriminatorValue)) { + return TabularSource.fromJsonKnownDiscriminator(readerToUse.reset()); + } else if ("AzureTableSource".equals(discriminatorValue)) { + return AzureTableSource.fromJson(readerToUse.reset()); + } else if ("InformixSource".equals(discriminatorValue)) { + return InformixSource.fromJson(readerToUse.reset()); + } else if ("Db2Source".equals(discriminatorValue)) { + return Db2Source.fromJson(readerToUse.reset()); + } else if ("OdbcSource".equals(discriminatorValue)) { + return OdbcSource.fromJson(readerToUse.reset()); + } else if ("MySqlSource".equals(discriminatorValue)) { + return MySqlSource.fromJson(readerToUse.reset()); + } else if ("PostgreSqlSource".equals(discriminatorValue)) { + return PostgreSqlSource.fromJson(readerToUse.reset()); + } else if ("PostgreSqlV2Source".equals(discriminatorValue)) { + return PostgreSqlV2Source.fromJson(readerToUse.reset()); + } else if ("SybaseSource".equals(discriminatorValue)) { + return SybaseSource.fromJson(readerToUse.reset()); + } else if ("SapBwSource".equals(discriminatorValue)) { + return 
SapBwSource.fromJson(readerToUse.reset()); + } else if ("SalesforceSource".equals(discriminatorValue)) { + return SalesforceSource.fromJson(readerToUse.reset()); + } else if ("SapCloudForCustomerSource".equals(discriminatorValue)) { + return SapCloudForCustomerSource.fromJson(readerToUse.reset()); + } else if ("SapEccSource".equals(discriminatorValue)) { + return SapEccSource.fromJson(readerToUse.reset()); + } else if ("SapHanaSource".equals(discriminatorValue)) { + return SapHanaSource.fromJson(readerToUse.reset()); + } else if ("SapOpenHubSource".equals(discriminatorValue)) { + return SapOpenHubSource.fromJson(readerToUse.reset()); + } else if ("SapOdpSource".equals(discriminatorValue)) { + return SapOdpSource.fromJson(readerToUse.reset()); + } else if ("SapTableSource".equals(discriminatorValue)) { + return SapTableSource.fromJson(readerToUse.reset()); + } else if ("SqlSource".equals(discriminatorValue)) { + return SqlSource.fromJson(readerToUse.reset()); + } else if ("SqlServerSource".equals(discriminatorValue)) { + return SqlServerSource.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForSqlServerSource".equals(discriminatorValue)) { + return AmazonRdsForSqlServerSource.fromJson(readerToUse.reset()); + } else if ("AzureSqlSource".equals(discriminatorValue)) { + return AzureSqlSource.fromJson(readerToUse.reset()); + } else if ("SqlMISource".equals(discriminatorValue)) { + return SqlMISource.fromJson(readerToUse.reset()); + } else if ("SqlDWSource".equals(discriminatorValue)) { + return SqlDWSource.fromJson(readerToUse.reset()); + } else if ("AzureMySqlSource".equals(discriminatorValue)) { + return AzureMySqlSource.fromJson(readerToUse.reset()); + } else if ("TeradataSource".equals(discriminatorValue)) { + return TeradataSource.fromJson(readerToUse.reset()); + } else if ("CassandraSource".equals(discriminatorValue)) { + return CassandraSource.fromJson(readerToUse.reset()); + } else if ("AmazonMWSSource".equals(discriminatorValue)) { + return 
AmazonMwsSource.fromJson(readerToUse.reset()); + } else if ("AzurePostgreSqlSource".equals(discriminatorValue)) { + return AzurePostgreSqlSource.fromJson(readerToUse.reset()); + } else if ("ConcurSource".equals(discriminatorValue)) { + return ConcurSource.fromJson(readerToUse.reset()); + } else if ("CouchbaseSource".equals(discriminatorValue)) { + return CouchbaseSource.fromJson(readerToUse.reset()); + } else if ("DrillSource".equals(discriminatorValue)) { + return DrillSource.fromJson(readerToUse.reset()); + } else if ("EloquaSource".equals(discriminatorValue)) { + return EloquaSource.fromJson(readerToUse.reset()); + } else if ("GoogleBigQuerySource".equals(discriminatorValue)) { + return GoogleBigQuerySource.fromJson(readerToUse.reset()); + } else if ("GoogleBigQueryV2Source".equals(discriminatorValue)) { + return GoogleBigQueryV2Source.fromJson(readerToUse.reset()); + } else if ("GreenplumSource".equals(discriminatorValue)) { + return GreenplumSource.fromJson(readerToUse.reset()); + } else if ("HBaseSource".equals(discriminatorValue)) { + return HBaseSource.fromJson(readerToUse.reset()); + } else if ("HiveSource".equals(discriminatorValue)) { + return HiveSource.fromJson(readerToUse.reset()); + } else if ("HubspotSource".equals(discriminatorValue)) { + return HubspotSource.fromJson(readerToUse.reset()); + } else if ("ImpalaSource".equals(discriminatorValue)) { + return ImpalaSource.fromJson(readerToUse.reset()); + } else if ("JiraSource".equals(discriminatorValue)) { + return JiraSource.fromJson(readerToUse.reset()); + } else if ("MagentoSource".equals(discriminatorValue)) { + return MagentoSource.fromJson(readerToUse.reset()); + } else if ("MariaDBSource".equals(discriminatorValue)) { + return MariaDBSource.fromJson(readerToUse.reset()); + } else if ("AzureMariaDBSource".equals(discriminatorValue)) { + return AzureMariaDBSource.fromJson(readerToUse.reset()); + } else if ("MarketoSource".equals(discriminatorValue)) { + return 
MarketoSource.fromJson(readerToUse.reset()); + } else if ("PaypalSource".equals(discriminatorValue)) { + return PaypalSource.fromJson(readerToUse.reset()); + } else if ("PhoenixSource".equals(discriminatorValue)) { + return PhoenixSource.fromJson(readerToUse.reset()); + } else if ("PrestoSource".equals(discriminatorValue)) { + return PrestoSource.fromJson(readerToUse.reset()); + } else if ("QuickBooksSource".equals(discriminatorValue)) { + return QuickBooksSource.fromJson(readerToUse.reset()); + } else if ("ServiceNowSource".equals(discriminatorValue)) { + return ServiceNowSource.fromJson(readerToUse.reset()); + } else if ("ShopifySource".equals(discriminatorValue)) { + return ShopifySource.fromJson(readerToUse.reset()); + } else if ("SparkSource".equals(discriminatorValue)) { + return SparkSource.fromJson(readerToUse.reset()); + } else if ("SquareSource".equals(discriminatorValue)) { + return SquareSource.fromJson(readerToUse.reset()); + } else if ("XeroSource".equals(discriminatorValue)) { + return XeroSource.fromJson(readerToUse.reset()); + } else if ("ZohoSource".equals(discriminatorValue)) { + return ZohoSource.fromJson(readerToUse.reset()); + } else if ("NetezzaSource".equals(discriminatorValue)) { + return NetezzaSource.fromJson(readerToUse.reset()); + } else if ("VerticaSource".equals(discriminatorValue)) { + return VerticaSource.fromJson(readerToUse.reset()); + } else if ("SalesforceMarketingCloudSource".equals(discriminatorValue)) { + return SalesforceMarketingCloudSource.fromJson(readerToUse.reset()); + } else if ("ResponsysSource".equals(discriminatorValue)) { + return ResponsysSource.fromJson(readerToUse.reset()); + } else if ("DynamicsAXSource".equals(discriminatorValue)) { + return DynamicsAXSource.fromJson(readerToUse.reset()); + } else if ("OracleServiceCloudSource".equals(discriminatorValue)) { + return OracleServiceCloudSource.fromJson(readerToUse.reset()); + } else if ("GoogleAdWordsSource".equals(discriminatorValue)) { + return 
GoogleAdWordsSource.fromJson(readerToUse.reset()); + } else if ("AmazonRedshiftSource".equals(discriminatorValue)) { + return AmazonRedshiftSource.fromJson(readerToUse.reset()); + } else if ("WarehouseSource".equals(discriminatorValue)) { + return WarehouseSource.fromJson(readerToUse.reset()); + } else if ("SalesforceV2Source".equals(discriminatorValue)) { + return SalesforceV2Source.fromJson(readerToUse.reset()); + } else if ("ServiceNowV2Source".equals(discriminatorValue)) { + return ServiceNowV2Source.fromJson(readerToUse.reset()); + } else if ("BlobSource".equals(discriminatorValue)) { + return BlobSource.fromJson(readerToUse.reset()); + } else if ("DocumentDbCollectionSource".equals(discriminatorValue)) { + return DocumentDbCollectionSource.fromJson(readerToUse.reset()); + } else if ("CosmosDbSqlApiSource".equals(discriminatorValue)) { + return CosmosDbSqlApiSource.fromJson(readerToUse.reset()); + } else if ("DynamicsSource".equals(discriminatorValue)) { + return DynamicsSource.fromJson(readerToUse.reset()); + } else if ("DynamicsCrmSource".equals(discriminatorValue)) { + return DynamicsCrmSource.fromJson(readerToUse.reset()); + } else if ("CommonDataServiceForAppsSource".equals(discriminatorValue)) { + return CommonDataServiceForAppsSource.fromJson(readerToUse.reset()); + } else if ("RelationalSource".equals(discriminatorValue)) { + return RelationalSource.fromJson(readerToUse.reset()); + } else if ("MicrosoftAccessSource".equals(discriminatorValue)) { + return MicrosoftAccessSource.fromJson(readerToUse.reset()); + } else if ("ODataSource".equals(discriminatorValue)) { + return ODataSource.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudSource".equals(discriminatorValue)) { + return SalesforceServiceCloudSource.fromJson(readerToUse.reset()); + } else if ("RestSource".equals(discriminatorValue)) { + return RestSource.fromJson(readerToUse.reset()); + } else if ("FileSystemSource".equals(discriminatorValue)) { + return 
FileSystemSource.fromJson(readerToUse.reset()); + } else if ("HdfsSource".equals(discriminatorValue)) { + return HdfsSource.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorerSource".equals(discriminatorValue)) { + return AzureDataExplorerSource.fromJson(readerToUse.reset()); + } else if ("OracleSource".equals(discriminatorValue)) { + return OracleSource.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForOracleSource".equals(discriminatorValue)) { + return AmazonRdsForOracleSource.fromJson(readerToUse.reset()); + } else if ("WebSource".equals(discriminatorValue)) { + return WebSource.fromJson(readerToUse.reset()); + } else if ("MongoDbSource".equals(discriminatorValue)) { + return MongoDbSource.fromJson(readerToUse.reset()); + } else if ("MongoDbAtlasSource".equals(discriminatorValue)) { + return MongoDbAtlasSource.fromJson(readerToUse.reset()); + } else if ("MongoDbV2Source".equals(discriminatorValue)) { + return MongoDbV2Source.fromJson(readerToUse.reset()); + } else if ("CosmosDbMongoDbApiSource".equals(discriminatorValue)) { + return CosmosDbMongoDbApiSource.fromJson(readerToUse.reset()); + } else if ("Office365Source".equals(discriminatorValue)) { + return Office365Source.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreSource".equals(discriminatorValue)) { + return AzureDataLakeStoreSource.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSSource".equals(discriminatorValue)) { + return AzureBlobFSSource.fromJson(readerToUse.reset()); + } else if ("HttpSource".equals(discriminatorValue)) { + return HttpSource.fromJson(readerToUse.reset()); + } else if ("LakeHouseTableSource".equals(discriminatorValue)) { + return LakeHouseTableSource.fromJson(readerToUse.reset()); + } else if ("SnowflakeSource".equals(discriminatorValue)) { + return SnowflakeSource.fromJson(readerToUse.reset()); + } else if ("SnowflakeV2Source".equals(discriminatorValue)) { + return SnowflakeV2Source.fromJson(readerToUse.reset()); + } else if 
("AzureDatabricksDeltaLakeSource".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeSource.fromJson(readerToUse.reset()); + } else if ("SharePointOnlineListSource".equals(discriminatorValue)) { + return SharePointOnlineListSource.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudV2Source".equals(discriminatorValue)) { + return SalesforceServiceCloudV2Source.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static CopySource fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopySource deserializedCopySource = new CopySource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCopySource.type = reader.getString(); + } else if ("sourceRetryCount".equals(fieldName)) { + deserializedCopySource.sourceRetryCount = reader.readUntyped(); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCopySource.sourceRetryWait = reader.readUntyped(); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCopySource.maxConcurrentConnections = reader.readUntyped(); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCopySource.disableMetricsCollection = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCopySource.additionalProperties = additionalProperties; + + return deserializedCopySource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java index 22bc6e252d70..0e62744fc875 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CopyTranslator.java @@ -5,36 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * A copy activity translator. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CopyTranslator.class, visible = true) -@JsonTypeName("CopyTranslator") -@JsonSubTypes({ @JsonSubTypes.Type(name = "TabularTranslator", value = TabularTranslator.class) }) @Fluent -public class CopyTranslator { +public class CopyTranslator implements JsonSerializable { /* * Copy translator type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CopyTranslator"; /* * A copy activity translator. */ - @JsonIgnore private Map additionalProperties; /** @@ -57,7 +48,6 @@ public String type() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -73,14 +63,6 @@ public CopyTranslator withAdditionalProperties(Map additionalPro return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -88,4 +70,76 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CopyTranslator from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CopyTranslator if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the CopyTranslator. + */ + public static CopyTranslator fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("TabularTranslator".equals(discriminatorValue)) { + return TabularTranslator.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static CopyTranslator fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CopyTranslator deserializedCopyTranslator = new CopyTranslator(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCopyTranslator.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCopyTranslator.additionalProperties = additionalProperties; + + return deserializedCopyTranslator; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbConnectionMode.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbConnectionMode.java index 511d8cc6f325..34d5a75871b5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbConnectionMode.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbConnectionMode.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public CosmosDbConnectionMode() { * @param name a name to look for. * @return the corresponding CosmosDbConnectionMode. 
*/ - @JsonCreator public static CosmosDbConnectionMode fromString(String name) { return fromString(name, CosmosDbConnectionMode.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java index 115c5d1d15a4..4cd2ee7fd41c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Azure Cosmos Database (CosmosDB) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbLinkedService.class, visible = true) -@JsonTypeName("CosmosDb") @Fluent public final class CosmosDbLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDb"; /* * CosmosDB linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private CosmosDbLinkedServiceTypeProperties innerTypeProperties = new CosmosDbLinkedServiceTypeProperties(); /** @@ -413,4 +409,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbLinkedService. 
+ */ + public static CosmosDbLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbLinkedService deserializedCosmosDbLinkedService = new CosmosDbLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCosmosDbLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCosmosDbLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCosmosDbLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCosmosDbLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCosmosDbLinkedService.innerTypeProperties + = CosmosDbLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java index d8103fcb4c50..5704355290fb 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiCollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiCollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The CosmosDB (MongoDB API) database dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CosmosDbMongoDbApiCollectionDataset.class, - visible = true) -@JsonTypeName("CosmosDbMongoDbApiCollection") @Fluent public final class CosmosDbMongoDbApiCollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbMongoDbApiCollection"; /* * CosmosDB (MongoDB API) database dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private CosmosDbMongoDbApiCollectionDatasetTypeProperties innerTypeProperties = new CosmosDbMongoDbApiCollectionDatasetTypeProperties(); @@ -170,4 +162,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbMongoDbApiCollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiCollectionDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiCollectionDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiCollectionDataset. 
+ */ + public static CosmosDbMongoDbApiCollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiCollectionDataset deserializedCosmosDbMongoDbApiCollectionDataset + = new CosmosDbMongoDbApiCollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCosmosDbMongoDbApiCollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCosmosDbMongoDbApiCollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.innerTypeProperties + = CosmosDbMongoDbApiCollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbMongoDbApiCollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbMongoDbApiCollectionDataset.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbMongoDbApiCollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java index f2f1fd953192..014ad368ae32 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for CosmosDB (MongoDB API) data source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CosmosDbMongoDbApiLinkedService.class, - visible = true) -@JsonTypeName("CosmosDbMongoDbApi") @Fluent public final class CosmosDbMongoDbApiLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbMongoDbApi"; /* * CosmosDB (MongoDB API) linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private CosmosDbMongoDbApiLinkedServiceTypeProperties innerTypeProperties = new CosmosDbMongoDbApiLinkedServiceTypeProperties(); @@ -193,4 +185,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbMongoDbApiLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiLinkedService. 
+ */ + public static CosmosDbMongoDbApiLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiLinkedService deserializedCosmosDbMongoDbApiLinkedService + = new CosmosDbMongoDbApiLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCosmosDbMongoDbApiLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCosmosDbMongoDbApiLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedService.innerTypeProperties + = CosmosDbMongoDbApiLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbMongoDbApiLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbMongoDbApiLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbMongoDbApiLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java index 3fa9c76cec78..08a47c009d0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSink.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity sink for a CosmosDB (MongoDB API) database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbMongoDbApiSink.class, visible = true) -@JsonTypeName("CosmosDbMongoDbApiSink") @Fluent public final class CosmosDbMongoDbApiSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbMongoDbApiSink"; /* @@ -29,7 +27,6 @@ public final class CosmosDbMongoDbApiSink extends CopySink { * default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -135,4 +132,72 @@ public CosmosDbMongoDbApiSink withDisableMetricsCollection(Object disableMetrics public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiSink if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiSink. 
+ */ + public static CosmosDbMongoDbApiSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiSink deserializedCosmosDbMongoDbApiSink = new CosmosDbMongoDbApiSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbMongoDbApiSink.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbMongoDbApiSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java index 882f7b397727..3f319408b95c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbMongoDbApiSource.java @@ -5,40 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for a CosmosDB (MongoDB API) database. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CosmosDbMongoDbApiSource.class, - visible = true) -@JsonTypeName("CosmosDbMongoDbApiSource") @Fluent public final class CosmosDbMongoDbApiSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbMongoDbApiSource"; /* * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or * pass an empty document ({}). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "filter") private Object filter; /* * Cursor methods for Mongodb query. 
*/ - @JsonProperty(value = "cursorMethods") private MongoDbCursorMethodsProperties cursorMethods; /* @@ -46,21 +38,18 @@ public final class CosmosDbMongoDbApiSource extends CopySource { * modifying the batch size will not affect the user or the application. This property's main purpose is to avoid * hit the limitation of response size. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "batchSize") private Object batchSize; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -241,4 +230,79 @@ public void validate() { cursorMethods().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("filter", this.filter); + jsonWriter.writeJsonField("cursorMethods", this.cursorMethods); + jsonWriter.writeUntypedField("batchSize", this.batchSize); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbMongoDbApiSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbMongoDbApiSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CosmosDbMongoDbApiSource. + */ + public static CosmosDbMongoDbApiSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbMongoDbApiSource deserializedCosmosDbMongoDbApiSource = new CosmosDbMongoDbApiSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.type = reader.getString(); + } else if ("filter".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.filter = reader.readUntyped(); + } else if ("cursorMethods".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.cursorMethods + = MongoDbCursorMethodsProperties.fromJson(reader); + } else if ("batchSize".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.batchSize = reader.readUntyped(); + } else if 
("queryTimeout".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedCosmosDbMongoDbApiSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbMongoDbApiSource.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbMongoDbApiSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java index fdfc1a9fa8d6..1b228d0139d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiCollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbSqlApiCollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Azure CosmosDB (SQL API) Collection dataset. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CosmosDbSqlApiCollectionDataset.class, - visible = true) -@JsonTypeName("CosmosDbSqlApiCollection") @Fluent public final class CosmosDbSqlApiCollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbSqlApiCollection"; /* * CosmosDB (SQL API) Collection dataset properties. */ - @JsonProperty(value = "typeProperties", required = true) private CosmosDbSqlApiCollectionDatasetTypeProperties innerTypeProperties = new CosmosDbSqlApiCollectionDatasetTypeProperties(); @@ -170,4 +162,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CosmosDbSqlApiCollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbSqlApiCollectionDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of CosmosDbSqlApiCollectionDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CosmosDbSqlApiCollectionDataset. + */ + public static CosmosDbSqlApiCollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbSqlApiCollectionDataset deserializedCosmosDbSqlApiCollectionDataset + = new CosmosDbSqlApiCollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCosmosDbSqlApiCollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCosmosDbSqlApiCollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.innerTypeProperties + = 
CosmosDbSqlApiCollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbSqlApiCollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbSqlApiCollectionDataset.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbSqlApiCollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java index d480469f4cfa..eafdaa294c1a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSink.java @@ -5,30 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure CosmosDB (SQL API) Collection sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbSqlApiSink.class, visible = true) -@JsonTypeName("CosmosDbSqlApiSink") @Fluent public final class CosmosDbSqlApiSink extends CopySink { /* * Copy sink type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbSqlApiSink"; /* * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed * values: insert and upsert. */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -132,4 +129,72 @@ public CosmosDbSqlApiSink withDisableMetricsCollection(Object disableMetricsColl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbSqlApiSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbSqlApiSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the CosmosDbSqlApiSink. 
+ */ + public static CosmosDbSqlApiSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbSqlApiSink deserializedCosmosDbSqlApiSink = new CosmosDbSqlApiSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedCosmosDbSqlApiSink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbSqlApiSink.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbSqlApiSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java index 
129f80f95136..db4d3769dc71 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CosmosDbSqlApiSource.java @@ -5,54 +5,47 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure CosmosDB (SQL API) Collection source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CosmosDbSqlApiSource.class, visible = true) -@JsonTypeName("CosmosDbSqlApiSource") @Fluent public final class CosmosDbSqlApiSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CosmosDbSqlApiSource"; /* * SQL API query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Page size of the result. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "pageSize") private Object pageSize; /* * Preferred regions. Type: array of strings (or Expression with resultType array of strings). */ - @JsonProperty(value = "preferredRegions") private Object preferredRegions; /* * Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "detectDatetime") private Object detectDatetime; /* * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -222,4 +215,78 @@ public CosmosDbSqlApiSource withDisableMetricsCollection(Object disableMetricsCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("pageSize", this.pageSize); + jsonWriter.writeUntypedField("preferredRegions", this.preferredRegions); + jsonWriter.writeUntypedField("detectDatetime", this.detectDatetime); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CosmosDbSqlApiSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CosmosDbSqlApiSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the CosmosDbSqlApiSource. 
+ */ + public static CosmosDbSqlApiSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CosmosDbSqlApiSource deserializedCosmosDbSqlApiSource = new CosmosDbSqlApiSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.query = reader.readUntyped(); + } else if ("pageSize".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.pageSize = reader.readUntyped(); + } else if ("preferredRegions".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.preferredRegions = reader.readUntyped(); + } else if ("detectDatetime".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.detectDatetime = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedCosmosDbSqlApiSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCosmosDbSqlApiSource.withAdditionalProperties(additionalProperties); + + return deserializedCosmosDbSqlApiSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java index 3397a42b5275..92ded2c4d487 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CouchbaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Couchbase server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseLinkedService.class, visible = true) -@JsonTypeName("Couchbase") @Fluent public final class CouchbaseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Couchbase"; /* * Couchbase server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private CouchbaseLinkedServiceTypeProperties innerTypeProperties = new CouchbaseLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CouchbaseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CouchbaseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CouchbaseLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CouchbaseLinkedService. 
+ */ + public static CouchbaseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CouchbaseLinkedService deserializedCouchbaseLinkedService = new CouchbaseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCouchbaseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCouchbaseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCouchbaseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCouchbaseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCouchbaseLinkedService.innerTypeProperties + = CouchbaseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCouchbaseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCouchbaseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCouchbaseLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java index 5d75fb9b8bee..3b83054d25e1 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Couchbase server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseSource.class, visible = true) -@JsonTypeName("CouchbaseSource") @Fluent public final class CouchbaseSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CouchbaseSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public CouchbaseSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CouchbaseSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CouchbaseSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the CouchbaseSource. 
+ */ + public static CouchbaseSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CouchbaseSource deserializedCouchbaseSource = new CouchbaseSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedCouchbaseSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedCouchbaseSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedCouchbaseSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedCouchbaseSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedCouchbaseSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedCouchbaseSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedCouchbaseSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedCouchbaseSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCouchbaseSource.withAdditionalProperties(additionalProperties); + + return deserializedCouchbaseSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java index 050f93080fd5..e94a9b171d65 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CouchbaseTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Couchbase server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CouchbaseTableDataset.class, visible = true) -@JsonTypeName("CouchbaseTable") @Fluent public final class CouchbaseTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CouchbaseTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CouchbaseTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CouchbaseTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CouchbaseTableDataset. 
+ */ + public static CouchbaseTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CouchbaseTableDataset deserializedCouchbaseTableDataset = new CouchbaseTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCouchbaseTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCouchbaseTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCouchbaseTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCouchbaseTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCouchbaseTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCouchbaseTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCouchbaseTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedCouchbaseTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedCouchbaseTableDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCouchbaseTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedCouchbaseTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java index 32e8169a0506..1c17ddd29720 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateDataFlowDebugSessionRequest.java @@ -5,36 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Request body structure for creating data flow debug session. */ @Fluent -public final class CreateDataFlowDebugSessionRequest { +public final class CreateDataFlowDebugSessionRequest implements JsonSerializable { /* * Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if * provided. */ - @JsonProperty(value = "computeType") private String computeType; /* * Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */ - @JsonProperty(value = "coreCount") private Integer coreCount; /* * Time to live setting of the cluster in minutes. */ - @JsonProperty(value = "timeToLive") private Integer timeToLive; /* * Set to use integration runtime setting for data flow debug session. 
*/ - @JsonProperty(value = "integrationRuntime") private IntegrationRuntimeDebugResource integrationRuntime; /** @@ -138,4 +138,51 @@ public void validate() { integrationRuntime().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("computeType", this.computeType); + jsonWriter.writeNumberField("coreCount", this.coreCount); + jsonWriter.writeNumberField("timeToLive", this.timeToLive); + jsonWriter.writeJsonField("integrationRuntime", this.integrationRuntime); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CreateDataFlowDebugSessionRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CreateDataFlowDebugSessionRequest if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CreateDataFlowDebugSessionRequest. 
+ */ + public static CreateDataFlowDebugSessionRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CreateDataFlowDebugSessionRequest deserializedCreateDataFlowDebugSessionRequest + = new CreateDataFlowDebugSessionRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("computeType".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionRequest.computeType = reader.getString(); + } else if ("coreCount".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionRequest.coreCount = reader.getNullable(JsonReader::getInt); + } else if ("timeToLive".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionRequest.timeToLive = reader.getNullable(JsonReader::getInt); + } else if ("integrationRuntime".equals(fieldName)) { + deserializedCreateDataFlowDebugSessionRequest.integrationRuntime + = IntegrationRuntimeDebugResource.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedCreateDataFlowDebugSessionRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateLinkedIntegrationRuntimeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateLinkedIntegrationRuntimeRequest.java index 27ddc0ea44fe..5ff44b23618a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateLinkedIntegrationRuntimeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CreateLinkedIntegrationRuntimeRequest.java @@ -5,35 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The linked integration runtime information. */ @Fluent -public final class CreateLinkedIntegrationRuntimeRequest { +public final class CreateLinkedIntegrationRuntimeRequest + implements JsonSerializable { /* * The name of the linked integration runtime. */ - @JsonProperty(value = "name") private String name; /* * The ID of the subscription that the linked integration runtime belongs to. */ - @JsonProperty(value = "subscriptionId") private String subscriptionId; /* * The name of the data factory that the linked integration runtime belongs to. */ - @JsonProperty(value = "dataFactoryName") private String dataFactoryName; /* * The location of the data factory that the linked integration runtime belongs to. */ - @JsonProperty(value = "dataFactoryLocation") private String dataFactoryLocation; /** @@ -131,4 +132,50 @@ public CreateLinkedIntegrationRuntimeRequest withDataFactoryLocation(String data */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("subscriptionId", this.subscriptionId); + jsonWriter.writeStringField("dataFactoryName", this.dataFactoryName); + jsonWriter.writeStringField("dataFactoryLocation", this.dataFactoryLocation); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CreateLinkedIntegrationRuntimeRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CreateLinkedIntegrationRuntimeRequest if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CreateLinkedIntegrationRuntimeRequest. 
+ */ + public static CreateLinkedIntegrationRuntimeRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CreateLinkedIntegrationRuntimeRequest deserializedCreateLinkedIntegrationRuntimeRequest + = new CreateLinkedIntegrationRuntimeRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedCreateLinkedIntegrationRuntimeRequest.name = reader.getString(); + } else if ("subscriptionId".equals(fieldName)) { + deserializedCreateLinkedIntegrationRuntimeRequest.subscriptionId = reader.getString(); + } else if ("dataFactoryName".equals(fieldName)) { + deserializedCreateLinkedIntegrationRuntimeRequest.dataFactoryName = reader.getString(); + } else if ("dataFactoryLocation".equals(fieldName)) { + deserializedCreateLinkedIntegrationRuntimeRequest.dataFactoryLocation = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCreateLinkedIntegrationRuntimeRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java index e5cde8518cc5..8c01e64b308e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Credential.java @@ -5,14 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -21,37 +18,27 @@ * The Azure Data Factory nested object which contains the information and credential which can be used to connect with * related store or compute resource. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Credential.class, visible = true) -@JsonTypeName("Credential") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "ServicePrincipal", value = ServicePrincipalCredential.class), - @JsonSubTypes.Type(name = "ManagedIdentity", value = ManagedIdentityCredential.class) }) @Fluent -public class Credential { +public class Credential implements JsonSerializable { /* * Type of credential. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Credential"; /* * Credential description. */ - @JsonProperty(value = "description") private String description; /* * List of tags that can be used for describing the Credential. */ - @JsonProperty(value = "annotations") private List annotations; /* * The Azure Data Factory nested object which contains the information and credential which can be used to connect * with related store or compute resource. */ - @JsonIgnore private Map additionalProperties; /** @@ -115,7 +102,6 @@ public Credential withAnnotations(List annotations) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -132,14 +118,6 @@ public Credential withAdditionalProperties(Map additionalPropert return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -147,4 +125,85 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Credential from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Credential if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the Credential. 
+ */ + public static Credential fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("ServicePrincipal".equals(discriminatorValue)) { + return ServicePrincipalCredential.fromJson(readerToUse.reset()); + } else if ("ManagedIdentity".equals(discriminatorValue)) { + return ManagedIdentityCredential.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static Credential fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Credential deserializedCredential = new Credential(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCredential.type = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedCredential.description = reader.getString(); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCredential.annotations = annotations; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCredential.additionalProperties = additionalProperties; + + return deserializedCredential; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java index 5371cf089a06..ea1d9c1a4e91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of credential resources. */ @Fluent -public final class CredentialListResponse { +public final class CredentialListResponse implements JsonSerializable { /* * List of credentials. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. 
*/ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CredentialListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CredentialListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CredentialListResponse if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CredentialListResponse. 
+ */ + public static CredentialListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CredentialListResponse deserializedCredentialListResponse = new CredentialListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> CredentialResourceInner.fromJson(reader1)); + deserializedCredentialListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedCredentialListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCredentialListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java index e0c50c70fa1c..7d1310d5e382 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReference.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -17,23 +18,20 @@ * Credential reference type. 
*/ @Fluent -public final class CredentialReference { +public final class CredentialReference implements JsonSerializable { /* * Credential reference type. */ - @JsonProperty(value = "type", required = true) private CredentialReferenceType type; /* * Reference credential name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Credential reference type. */ - @JsonIgnore private Map additionalProperties; /** @@ -87,7 +85,6 @@ public CredentialReference withReferenceName(String referenceName) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -103,14 +100,6 @@ public CredentialReference withAdditionalProperties(Map addition return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -129,4 +118,55 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CredentialReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("referenceName", this.referenceName); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CredentialReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CredentialReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CredentialReference. + */ + public static CredentialReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CredentialReference deserializedCredentialReference = new CredentialReference(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCredentialReference.type = CredentialReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedCredentialReference.referenceName = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCredentialReference.additionalProperties = additionalProperties; + + return deserializedCredentialReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReferenceType.java index d6b19e75f4b8..bf23f21f8603 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CredentialReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public CredentialReferenceType() { 
* @param name a name to look for. * @return the corresponding CredentialReferenceType. */ - @JsonCreator public static CredentialReferenceType fromString(String name) { return fromString(name, CredentialReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java index 83282aead30d..9b37846fa5c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CustomActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Custom activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomActivity.class, visible = true) -@JsonTypeName("Custom") @Fluent public final class CustomActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Custom"; /* * Custom activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private CustomActivityTypeProperties innerTypeProperties = new CustomActivityTypeProperties(); /** @@ -318,4 +314,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CustomActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomActivity. 
+ */ + public static CustomActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomActivity deserializedCustomActivity = new CustomActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedCustomActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedCustomActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedCustomActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedCustomActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedCustomActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedCustomActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedCustomActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedCustomActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedCustomActivity.innerTypeProperties = CustomActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCustomActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedCustomActivity.withAdditionalProperties(additionalProperties); + + return deserializedCustomActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivityReferenceObject.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivityReferenceObject.java index 820901581e08..f92dbc9860dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivityReferenceObject.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomActivityReferenceObject.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Reference objects for custom activity. */ @Fluent -public final class CustomActivityReferenceObject { +public final class CustomActivityReferenceObject implements JsonSerializable { /* * Linked service references. */ - @JsonProperty(value = "linkedServices") private List linkedServices; /* * Dataset references. 
*/ - @JsonProperty(value = "datasets") private List datasets; /** @@ -84,4 +86,48 @@ public void validate() { datasets().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("linkedServices", this.linkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("datasets", this.datasets, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomActivityReferenceObject from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomActivityReferenceObject if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the CustomActivityReferenceObject. + */ + public static CustomActivityReferenceObject fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomActivityReferenceObject deserializedCustomActivityReferenceObject + = new CustomActivityReferenceObject(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServices".equals(fieldName)) { + List linkedServices + = reader.readArray(reader1 -> LinkedServiceReference.fromJson(reader1)); + deserializedCustomActivityReferenceObject.linkedServices = linkedServices; + } else if ("datasets".equals(fieldName)) { + List datasets = reader.readArray(reader1 -> DatasetReference.fromJson(reader1)); + deserializedCustomActivityReferenceObject.datasets = datasets; + } else { + reader.skipChildren(); + } + } + + return deserializedCustomActivityReferenceObject; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java index 2cfe9ea59351..7757b3e9ce91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataSourceLinkedService.java @@ -6,35 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Custom linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = CustomDataSourceLinkedService.class, - visible = true) -@JsonTypeName("CustomDataSource") @Fluent public final class CustomDataSourceLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CustomDataSource"; /* * Custom linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private Object typeProperties; /** @@ -125,4 +117,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CustomDataSourceLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("typeProperties", this.typeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomDataSourceLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomDataSourceLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomDataSourceLinkedService. 
+ */ + public static CustomDataSourceLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomDataSourceLinkedService deserializedCustomDataSourceLinkedService + = new CustomDataSourceLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedCustomDataSourceLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCustomDataSourceLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCustomDataSourceLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCustomDataSourceLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedCustomDataSourceLinkedService.typeProperties = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedCustomDataSourceLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCustomDataSourceLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedCustomDataSourceLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java index 4128e7775c57..fe886306da4c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomDataset.java @@ -5,31 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The custom dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomDataset.class, visible = true) -@JsonTypeName("CustomDataset") @Fluent public final class CustomDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CustomDataset"; /* * Custom dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private Object typeProperties; /** @@ -140,4 +136,79 @@ public CustomDataset withFolder(DatasetFolder folder) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("typeProperties", this.typeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomDataset. 
+ */ + public static CustomDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomDataset deserializedCustomDataset = new CustomDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedCustomDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedCustomDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedCustomDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedCustomDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedCustomDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCustomDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedCustomDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedCustomDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedCustomDataset.typeProperties = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCustomDataset.withAdditionalProperties(additionalProperties); + + return deserializedCustomDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java index 788f397dd147..0b13856d77c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomEventsTrigger.java @@ -6,33 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.CustomEventsTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that runs every time a custom event is received. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomEventsTrigger.class, visible = true) -@JsonTypeName("CustomEventsTrigger") @Fluent public final class CustomEventsTrigger extends MultiplePipelineTrigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CustomEventsTrigger"; /* * Custom Events Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private CustomEventsTriggerTypeProperties innerTypeProperties = new CustomEventsTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of CustomEventsTrigger class. 
*/ @@ -58,6 +60,17 @@ private CustomEventsTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. + */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -199,4 +212,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(CustomEventsTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("pipelines", pipelines(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomEventsTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomEventsTrigger if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the CustomEventsTrigger. 
+ */ + public static CustomEventsTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomEventsTrigger deserializedCustomEventsTrigger = new CustomEventsTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedCustomEventsTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedCustomEventsTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedCustomEventsTrigger.withAnnotations(annotations); + } else if ("pipelines".equals(fieldName)) { + List pipelines + = reader.readArray(reader1 -> TriggerPipelineReference.fromJson(reader1)); + deserializedCustomEventsTrigger.withPipelines(pipelines); + } else if ("typeProperties".equals(fieldName)) { + deserializedCustomEventsTrigger.innerTypeProperties + = CustomEventsTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedCustomEventsTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedCustomEventsTrigger.withAdditionalProperties(additionalProperties); + + return deserializedCustomEventsTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java index b14439a2811f..7ba1dd84f111 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/CustomSetupBase.java @@ -5,29 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The base definition of the custom setup. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = CustomSetupBase.class, visible = true) -@JsonTypeName("CustomSetupBase") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "CmdkeySetup", value = CmdkeySetup.class), - @JsonSubTypes.Type(name = "EnvironmentVariableSetup", value = EnvironmentVariableSetup.class), - @JsonSubTypes.Type(name = "ComponentSetup", value = ComponentSetup.class), - @JsonSubTypes.Type(name = "AzPowerShellSetup", value = AzPowerShellSetup.class) }) @Immutable -public class CustomSetupBase { +public class CustomSetupBase implements JsonSerializable { /* * The type of custom setup. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "CustomSetupBase"; /** @@ -52,4 +43,71 @@ public String type() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of CustomSetupBase from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of CustomSetupBase if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the CustomSetupBase. + */ + public static CustomSetupBase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("CmdkeySetup".equals(discriminatorValue)) { + return CmdkeySetup.fromJson(readerToUse.reset()); + } else if ("EnvironmentVariableSetup".equals(discriminatorValue)) { + return EnvironmentVariableSetup.fromJson(readerToUse.reset()); + } else if ("ComponentSetup".equals(discriminatorValue)) { + return ComponentSetup.fromJson(readerToUse.reset()); + } else if ("AzPowerShellSetup".equals(discriminatorValue)) { + return AzPowerShellSetup.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static CustomSetupBase fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + CustomSetupBase deserializedCustomSetupBase = new CustomSetupBase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedCustomSetupBase.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedCustomSetupBase; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandDefaultValue.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandDefaultValue.java index 51e15df1455c..5017c6b561d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandDefaultValue.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandDefaultValue.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * Default value. */ @Fluent -public final class DWCopyCommandDefaultValue { +public final class DWCopyCommandDefaultValue implements JsonSerializable { /* * Column name. Type: object (or Expression with resultType string). */ - @JsonProperty(value = "columnName") private Object columnName; /* * The default value of the column. Type: object (or Expression with resultType string). */ - @JsonProperty(value = "defaultValue") private Object defaultValue; /** @@ -79,4 +81,43 @@ public DWCopyCommandDefaultValue withDefaultValue(Object defaultValue) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("columnName", this.columnName); + jsonWriter.writeUntypedField("defaultValue", this.defaultValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DWCopyCommandDefaultValue from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DWCopyCommandDefaultValue if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DWCopyCommandDefaultValue. 
+ */ + public static DWCopyCommandDefaultValue fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DWCopyCommandDefaultValue deserializedDWCopyCommandDefaultValue = new DWCopyCommandDefaultValue(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("columnName".equals(fieldName)) { + deserializedDWCopyCommandDefaultValue.columnName = reader.readUntyped(); + } else if ("defaultValue".equals(fieldName)) { + deserializedDWCopyCommandDefaultValue.defaultValue = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDWCopyCommandDefaultValue; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java index 6d36f164cb62..8a08908bce55 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DWCopyCommandSettings.java @@ -5,8 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -14,13 +17,12 @@ * DW Copy Command settings. 
*/ @Fluent -public final class DWCopyCommandSettings { +public final class DWCopyCommandSettings implements JsonSerializable { /* * Specifies the default values for each target column in SQL DW. The default values in the property overwrite the * DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or * Expression with resultType array of objects). */ - @JsonProperty(value = "defaultValues") private List defaultValues; /* @@ -28,8 +30,6 @@ public final class DWCopyCommandSettings { * (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": * "'ymd'" } */ - @JsonProperty(value = "additionalOptions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalOptions; /** @@ -96,4 +96,47 @@ public void validate() { defaultValues().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("defaultValues", this.defaultValues, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("additionalOptions", this.additionalOptions, + (writer, element) -> writer.writeString(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DWCopyCommandSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DWCopyCommandSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the DWCopyCommandSettings. 
+ */ + public static DWCopyCommandSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DWCopyCommandSettings deserializedDWCopyCommandSettings = new DWCopyCommandSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("defaultValues".equals(fieldName)) { + List defaultValues + = reader.readArray(reader1 -> DWCopyCommandDefaultValue.fromJson(reader1)); + deserializedDWCopyCommandSettings.defaultValues = defaultValues; + } else if ("additionalOptions".equals(fieldName)) { + Map additionalOptions = reader.readMap(reader1 -> reader1.getString()); + deserializedDWCopyCommandSettings.additionalOptions = additionalOptions; + } else { + reader.skipChildren(); + } + } + + return deserializedDWCopyCommandSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java index 5a789a26ff51..f61fc9d9f2ca 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlow.java @@ -5,47 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import 
java.util.List; /** * Azure Data Factory nested object which contains a flow with data movements and transformations. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DataFlow.class, visible = true) -@JsonTypeName("DataFlow") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "MappingDataFlow", value = MappingDataFlow.class), - @JsonSubTypes.Type(name = "Flowlet", value = Flowlet.class), - @JsonSubTypes.Type(name = "WranglingDataFlow", value = WranglingDataFlow.class) }) @Fluent -public class DataFlow { +public class DataFlow implements JsonSerializable { /* * Type of data flow. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DataFlow"; /* * The description of the data flow. */ - @JsonProperty(value = "description") private String description; /* * List of tags that can be used for describing the data flow. */ - @JsonProperty(value = "annotations") private List annotations; /* * The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ - @JsonProperty(value = "folder") private DataFlowFolder folder; /** @@ -135,4 +124,79 @@ public void validate() { folder().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", this.folder); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlow from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlow if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the DataFlow. 
+ */ + public static DataFlow fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("MappingDataFlow".equals(discriminatorValue)) { + return MappingDataFlow.fromJson(readerToUse.reset()); + } else if ("Flowlet".equals(discriminatorValue)) { + return Flowlet.fromJson(readerToUse.reset()); + } else if ("WranglingDataFlow".equals(discriminatorValue)) { + return WranglingDataFlow.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static DataFlow fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlow deserializedDataFlow = new DataFlow(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDataFlow.type = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedDataFlow.description = reader.getString(); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDataFlow.annotations = annotations; + } else if ("folder".equals(fieldName)) { + deserializedDataFlow.folder = DataFlowFolder.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlow; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowComputeType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowComputeType.java index d6d40bd9b46b..b77c95e82efc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowComputeType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowComputeType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public DataFlowComputeType() { * @param name a name to look for. * @return the corresponding DataFlowComputeType. */ - @JsonCreator public static DataFlowComputeType fromString(String name) { return fromString(name, DataFlowComputeType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java index 03bcffda40bd..1fb44e16cb21 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandPayload.java @@ -6,36 +6,36 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Structure of command payload. */ @Fluent -public final class DataFlowDebugCommandPayload { +public final class DataFlowDebugCommandPayload implements JsonSerializable { /* * The stream name which is used for preview. */ - @JsonProperty(value = "streamName", required = true) private String streamName; /* * Row limits for preview response. */ - @JsonProperty(value = "rowLimits") private Integer rowLimits; /* * Array of column names. */ - @JsonProperty(value = "columns") private List columns; /* * The expression which is used for preview. */ - @JsonProperty(value = "expression") private String expression; /** @@ -138,4 +138,51 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataFlowDebugCommandPayload.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("streamName", this.streamName); + jsonWriter.writeNumberField("rowLimits", this.rowLimits); + jsonWriter.writeArrayField("columns", this.columns, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("expression", this.expression); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugCommandPayload from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugCommandPayload if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowDebugCommandPayload. 
+ */ + public static DataFlowDebugCommandPayload fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugCommandPayload deserializedDataFlowDebugCommandPayload = new DataFlowDebugCommandPayload(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("streamName".equals(fieldName)) { + deserializedDataFlowDebugCommandPayload.streamName = reader.getString(); + } else if ("rowLimits".equals(fieldName)) { + deserializedDataFlowDebugCommandPayload.rowLimits = reader.getNullable(JsonReader::getInt); + } else if ("columns".equals(fieldName)) { + List columns = reader.readArray(reader1 -> reader1.getString()); + deserializedDataFlowDebugCommandPayload.columns = columns; + } else if ("expression".equals(fieldName)) { + deserializedDataFlowDebugCommandPayload.expression = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowDebugCommandPayload; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandRequest.java index 7a115fa4840c..8e64c1b7c60e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandRequest.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import 
java.io.IOException; /** * Request body structure for data flow debug command. */ @Fluent -public final class DataFlowDebugCommandRequest { +public final class DataFlowDebugCommandRequest implements JsonSerializable { /* * The ID of data flow debug session. */ - @JsonProperty(value = "sessionId") private String sessionId; /* * The command type. */ - @JsonProperty(value = "command") private DataFlowDebugCommandType command; /* * The command payload object. */ - @JsonProperty(value = "commandPayload") private DataFlowDebugCommandPayload commandPayload; /** @@ -106,4 +107,48 @@ public void validate() { commandPayload().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("sessionId", this.sessionId); + jsonWriter.writeStringField("command", this.command == null ? null : this.command.toString()); + jsonWriter.writeJsonField("commandPayload", this.commandPayload); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugCommandRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugCommandRequest if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowDebugCommandRequest. 
+ */ + public static DataFlowDebugCommandRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugCommandRequest deserializedDataFlowDebugCommandRequest = new DataFlowDebugCommandRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sessionId".equals(fieldName)) { + deserializedDataFlowDebugCommandRequest.sessionId = reader.getString(); + } else if ("command".equals(fieldName)) { + deserializedDataFlowDebugCommandRequest.command + = DataFlowDebugCommandType.fromString(reader.getString()); + } else if ("commandPayload".equals(fieldName)) { + deserializedDataFlowDebugCommandRequest.commandPayload + = DataFlowDebugCommandPayload.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowDebugCommandRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandType.java index 67f8962a20cc..2dd935737ba4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugCommandType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public DataFlowDebugCommandType() { * @param name a name to look for. * @return the corresponding DataFlowDebugCommandType. 
*/ - @JsonCreator public static DataFlowDebugCommandType fromString(String name) { return fromString(name, DataFlowDebugCommandType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackage.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackage.java index ef4528e62911..b42224309665 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackage.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackage.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,53 +18,45 @@ * Request body structure for starting data flow debug session. */ @Fluent -public final class DataFlowDebugPackage { +public final class DataFlowDebugPackage implements JsonSerializable { /* * The ID of data flow debug session. */ - @JsonProperty(value = "sessionId") private String sessionId; /* * Data flow instance. */ - @JsonProperty(value = "dataFlow") private DataFlowDebugResource dataFlow; /* * List of Data flows */ - @JsonProperty(value = "dataFlows") private List dataFlows; /* * List of datasets. */ - @JsonProperty(value = "datasets") private List datasets; /* * List of linked services. 
*/ - @JsonProperty(value = "linkedServices") private List linkedServices; /* * Staging info for debug session. */ - @JsonProperty(value = "staging") private DataFlowStagingInfo staging; /* * Data flow debug settings. */ - @JsonProperty(value = "debugSettings") private DataFlowDebugPackageDebugSettings debugSettings; /* * Request body structure for starting data flow debug session. */ - @JsonIgnore private Map additionalProperties; /** @@ -217,7 +210,6 @@ public DataFlowDebugPackage withDebugSettings(DataFlowDebugPackageDebugSettings * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -233,14 +225,6 @@ public DataFlowDebugPackage withAdditionalProperties(Map additio return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -266,4 +250,76 @@ public void validate() { debugSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("sessionId", this.sessionId); + jsonWriter.writeJsonField("dataFlow", this.dataFlow); + jsonWriter.writeArrayField("dataFlows", this.dataFlows, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("datasets", this.datasets, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("linkedServices", this.linkedServices, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("staging", this.staging); + jsonWriter.writeJsonField("debugSettings", this.debugSettings); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugPackage from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugPackage if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowDebugPackage. 
+ */ + public static DataFlowDebugPackage fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugPackage deserializedDataFlowDebugPackage = new DataFlowDebugPackage(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sessionId".equals(fieldName)) { + deserializedDataFlowDebugPackage.sessionId = reader.getString(); + } else if ("dataFlow".equals(fieldName)) { + deserializedDataFlowDebugPackage.dataFlow = DataFlowDebugResource.fromJson(reader); + } else if ("dataFlows".equals(fieldName)) { + List dataFlows + = reader.readArray(reader1 -> DataFlowDebugResource.fromJson(reader1)); + deserializedDataFlowDebugPackage.dataFlows = dataFlows; + } else if ("datasets".equals(fieldName)) { + List datasets + = reader.readArray(reader1 -> DatasetDebugResource.fromJson(reader1)); + deserializedDataFlowDebugPackage.datasets = datasets; + } else if ("linkedServices".equals(fieldName)) { + List linkedServices + = reader.readArray(reader1 -> LinkedServiceDebugResource.fromJson(reader1)); + deserializedDataFlowDebugPackage.linkedServices = linkedServices; + } else if ("staging".equals(fieldName)) { + deserializedDataFlowDebugPackage.staging = DataFlowStagingInfo.fromJson(reader); + } else if ("debugSettings".equals(fieldName)) { + deserializedDataFlowDebugPackage.debugSettings = DataFlowDebugPackageDebugSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataFlowDebugPackage.additionalProperties = additionalProperties; + + return deserializedDataFlowDebugPackage; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackageDebugSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackageDebugSettings.java index 1eadd756d965..133098538f70 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackageDebugSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugPackageDebugSettings.java @@ -5,8 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; import java.util.Map; @@ -14,24 +17,20 @@ * Data flow debug settings. */ @Fluent -public final class DataFlowDebugPackageDebugSettings { +public final class DataFlowDebugPackageDebugSettings implements JsonSerializable { /* * Source setting for data flow debug. */ - @JsonProperty(value = "sourceSettings") private List sourceSettings; /* * Data flow parameters. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * Parameters for dataset. 
*/ - @JsonProperty(value = "datasetParameters") private Object datasetParameters; /** @@ -110,4 +109,51 @@ public void validate() { sourceSettings().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sourceSettings", this.sourceSettings, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("datasetParameters", this.datasetParameters); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugPackageDebugSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugPackageDebugSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowDebugPackageDebugSettings. 
+ */ + public static DataFlowDebugPackageDebugSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugPackageDebugSettings deserializedDataFlowDebugPackageDebugSettings + = new DataFlowDebugPackageDebugSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceSettings".equals(fieldName)) { + List sourceSettings + = reader.readArray(reader1 -> DataFlowSourceSetting.fromJson(reader1)); + deserializedDataFlowDebugPackageDebugSettings.sourceSettings = sourceSettings; + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedDataFlowDebugPackageDebugSettings.parameters = parameters; + } else if ("datasetParameters".equals(fieldName)) { + deserializedDataFlowDebugPackageDebugSettings.datasetParameters = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowDebugPackageDebugSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java index 213e2a3adbfd..ccd6754779f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowDebugResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Data flow debug 
resource. @@ -16,7 +19,6 @@ public final class DataFlowDebugResource extends SubResourceDebugResource { /* * Data flow properties. */ - @JsonProperty(value = "properties", required = true) private DataFlow properties; /** @@ -72,4 +74,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataFlowDebugResource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowDebugResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowDebugResource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowDebugResource. 
+ */ + public static DataFlowDebugResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowDebugResource deserializedDataFlowDebugResource = new DataFlowDebugResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDataFlowDebugResource.withName(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedDataFlowDebugResource.properties = DataFlow.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowDebugResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowFolder.java index 62ccc4deedea..f40d8ca19466 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowFolder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowFolder.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ @Fluent -public final class DataFlowFolder { +public final class DataFlowFolder implements JsonSerializable { /* * The name of the folder that this data flow is in. 
*/ - @JsonProperty(value = "name") private String name; /** @@ -51,4 +54,40 @@ public DataFlowFolder withName(String name) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowFolder from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowFolder if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowFolder. + */ + public static DataFlowFolder fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowFolder deserializedDataFlowFolder = new DataFlowFolder(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDataFlowFolder.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowFolder; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java index f36032916a3b..69257197b242 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DataFlowResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of data flow resources. */ @Fluent -public final class DataFlowListResponse { +public final class DataFlowListResponse implements JsonSerializable { /* * List of data flows. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataFlowListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowListResponse if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowListResponse. 
+ */ + public static DataFlowListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowListResponse deserializedDataFlowListResponse = new DataFlowListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> DataFlowResourceInner.fromJson(reader1)); + deserializedDataFlowListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedDataFlowListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java index 428eef6b0ba5..584e22511f07 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReference.java @@ -6,11 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -18,36 +18,30 @@ * Data flow reference type. 
*/ @Fluent -public final class DataFlowReference { +public final class DataFlowReference implements JsonSerializable { /* * Data flow reference type. */ - @JsonProperty(value = "type", required = true) private DataFlowReferenceType type; /* * Reference data flow name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Reference data flow parameters from dataset. */ - @JsonProperty(value = "datasetParameters") private Object datasetParameters; /* * Data flow parameters */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * Data flow reference type. */ - @JsonIgnore private Map additionalProperties; /** @@ -141,7 +135,6 @@ public DataFlowReference withParameters(Map parameters) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -157,14 +150,6 @@ public DataFlowReference withAdditionalProperties(Map additional return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -183,4 +168,62 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataFlowReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? 
null : this.type.toString()); + jsonWriter.writeStringField("referenceName", this.referenceName); + jsonWriter.writeUntypedField("datasetParameters", this.datasetParameters); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowReference. + */ + public static DataFlowReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowReference deserializedDataFlowReference = new DataFlowReference(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDataFlowReference.type = DataFlowReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedDataFlowReference.referenceName = reader.getString(); + } else if ("datasetParameters".equals(fieldName)) { + deserializedDataFlowReference.datasetParameters = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedDataFlowReference.parameters = parameters; + } else { + if (additionalProperties == null) { + additionalProperties = new 
LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataFlowReference.additionalProperties = additionalProperties; + + return deserializedDataFlowReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReferenceType.java index 7f47698e825b..8dd34daaebdb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public DataFlowReferenceType() { * @param name a name to look for. * @return the corresponding DataFlowReferenceType. 
*/ - @JsonCreator public static DataFlowReferenceType fromString(String name) { return fromString(name, DataFlowReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSink.java index 1a35e3c71380..68c4e15cb284 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSink.java @@ -5,7 +5,10 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Transformation for data flow sink. @@ -15,13 +18,11 @@ public class DataFlowSink extends Transformation { /* * Schema linked service reference. */ - @JsonProperty(value = "schemaLinkedService") private LinkedServiceReference schemaLinkedService; /* * Rejected data linked service reference. 
*/ - @JsonProperty(value = "rejectedDataLinkedService") private LinkedServiceReference rejectedDataLinkedService; /** @@ -130,4 +131,59 @@ public void validate() { rejectedDataLinkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeJsonField("dataset", dataset()); + jsonWriter.writeJsonField("linkedService", linkedService()); + jsonWriter.writeJsonField("flowlet", flowlet()); + jsonWriter.writeJsonField("schemaLinkedService", this.schemaLinkedService); + jsonWriter.writeJsonField("rejectedDataLinkedService", this.rejectedDataLinkedService); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowSink. 
+ */ + public static DataFlowSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowSink deserializedDataFlowSink = new DataFlowSink(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDataFlowSink.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDataFlowSink.withDescription(reader.getString()); + } else if ("dataset".equals(fieldName)) { + deserializedDataFlowSink.withDataset(DatasetReference.fromJson(reader)); + } else if ("linkedService".equals(fieldName)) { + deserializedDataFlowSink.withLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("flowlet".equals(fieldName)) { + deserializedDataFlowSink.withFlowlet(DataFlowReference.fromJson(reader)); + } else if ("schemaLinkedService".equals(fieldName)) { + deserializedDataFlowSink.schemaLinkedService = LinkedServiceReference.fromJson(reader); + } else if ("rejectedDataLinkedService".equals(fieldName)) { + deserializedDataFlowSink.rejectedDataLinkedService = LinkedServiceReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSource.java index fd7a1b5efe5d..e8da7950d3ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSource.java @@ -5,7 +5,10 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Transformation for data flow source. @@ -15,7 +18,6 @@ public class DataFlowSource extends Transformation { /* * Schema linked service reference. */ - @JsonProperty(value = "schemaLinkedService") private LinkedServiceReference schemaLinkedService; /** @@ -101,4 +103,56 @@ public void validate() { schemaLinkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeJsonField("dataset", dataset()); + jsonWriter.writeJsonField("linkedService", linkedService()); + jsonWriter.writeJsonField("flowlet", flowlet()); + jsonWriter.writeJsonField("schemaLinkedService", this.schemaLinkedService); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataFlowSource. 
+ */ + public static DataFlowSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowSource deserializedDataFlowSource = new DataFlowSource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDataFlowSource.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDataFlowSource.withDescription(reader.getString()); + } else if ("dataset".equals(fieldName)) { + deserializedDataFlowSource.withDataset(DatasetReference.fromJson(reader)); + } else if ("linkedService".equals(fieldName)) { + deserializedDataFlowSource.withLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("flowlet".equals(fieldName)) { + deserializedDataFlowSource.withFlowlet(DataFlowReference.fromJson(reader)); + } else if ("schemaLinkedService".equals(fieldName)) { + deserializedDataFlowSource.schemaLinkedService = LinkedServiceReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSourceSetting.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSourceSetting.java index a4f15c8b4d81..9db70c1f7ef7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSourceSetting.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowSourceSetting.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import 
com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,23 +17,20 @@ * Definition of data flow source setting for debug. */ @Fluent -public final class DataFlowSourceSetting { +public final class DataFlowSourceSetting implements JsonSerializable { /* * The data flow source name. */ - @JsonProperty(value = "sourceName") private String sourceName; /* * Defines the row limit of data flow source in debug. */ - @JsonProperty(value = "rowLimit") private Integer rowLimit; /* * Definition of data flow source setting for debug. */ - @JsonIgnore private Map additionalProperties; /** @@ -86,7 +84,6 @@ public DataFlowSourceSetting withRowLimit(Integer rowLimit) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -102,14 +99,6 @@ public DataFlowSourceSetting withAdditionalProperties(Map additi return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -117,4 +106,54 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("sourceName", this.sourceName); + jsonWriter.writeNumberField("rowLimit", this.rowLimit); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowSourceSetting from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataFlowSourceSetting if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowSourceSetting. + */ + public static DataFlowSourceSetting fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowSourceSetting deserializedDataFlowSourceSetting = new DataFlowSourceSetting(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceName".equals(fieldName)) { + deserializedDataFlowSourceSetting.sourceName = reader.getString(); + } else if ("rowLimit".equals(fieldName)) { + deserializedDataFlowSourceSetting.rowLimit = reader.getNullable(JsonReader::getInt); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataFlowSourceSetting.additionalProperties = additionalProperties; + + return deserializedDataFlowSourceSetting; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowStagingInfo.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowStagingInfo.java index 8103fdac50c8..2bd5ad5a6ca6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowStagingInfo.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataFlowStagingInfo.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Staging info for execute data flow activity. */ @Fluent -public final class DataFlowStagingInfo { +public final class DataFlowStagingInfo implements JsonSerializable { /* * Staging linked service reference. */ - @JsonProperty(value = "linkedService") private LinkedServiceReference linkedService; /* * Folder path for staging blob. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "folderPath") private Object folderPath; /** @@ -80,4 +82,43 @@ public void validate() { linkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedService", this.linkedService); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataFlowStagingInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DataFlowStagingInfo if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DataFlowStagingInfo. + */ + public static DataFlowStagingInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataFlowStagingInfo deserializedDataFlowStagingInfo = new DataFlowStagingInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedService".equals(fieldName)) { + deserializedDataFlowStagingInfo.linkedService = LinkedServiceReference.fromJson(reader); + } else if ("folderPath".equals(fieldName)) { + deserializedDataFlowStagingInfo.folderPath = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataFlowStagingInfo; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java index b1b9b3001408..3de87c01bc9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataLakeAnalyticsUsqlActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DataLakeAnalyticsUsqlActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Data Lake Analytics U-SQL activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DataLakeAnalyticsUsqlActivity.class, - visible = true) -@JsonTypeName("DataLakeAnalyticsU-SQL") @Fluent public final class DataLakeAnalyticsUsqlActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DataLakeAnalyticsU-SQL"; /* * Data Lake Analytics U-SQL activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private DataLakeAnalyticsUsqlActivityTypeProperties innerTypeProperties = new DataLakeAnalyticsUsqlActivityTypeProperties(); @@ -327,4 +319,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataLakeAnalyticsUsqlActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataLakeAnalyticsUsqlActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataLakeAnalyticsUsqlActivity if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataLakeAnalyticsUsqlActivity. 
+ */ + public static DataLakeAnalyticsUsqlActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataLakeAnalyticsUsqlActivity deserializedDataLakeAnalyticsUsqlActivity + = new DataLakeAnalyticsUsqlActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedDataLakeAnalyticsUsqlActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedDataLakeAnalyticsUsqlActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.innerTypeProperties + = DataLakeAnalyticsUsqlActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDataLakeAnalyticsUsqlActivity.type = reader.getString(); + } else { 
+ if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataLakeAnalyticsUsqlActivity.withAdditionalProperties(additionalProperties); + + return deserializedDataLakeAnalyticsUsqlActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataMapperMapping.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataMapperMapping.java index 266728f20fbf..149e85be19aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataMapperMapping.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataMapperMapping.java @@ -5,41 +5,40 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Source and target table mapping details. */ @Fluent -public final class DataMapperMapping { +public final class DataMapperMapping implements JsonSerializable { /* * Name of the target table */ - @JsonProperty(value = "targetEntityName") private String targetEntityName; /* * Name of the source table */ - @JsonProperty(value = "sourceEntityName") private String sourceEntityName; /* * The connection reference for the source connection. */ - @JsonProperty(value = "sourceConnectionReference") private MapperConnectionReference sourceConnectionReference; /* * This holds the user provided attribute mapping information. 
*/ - @JsonProperty(value = "attributeMappingInfo") private MapperAttributeMappings attributeMappingInfo; /* * This holds the source denormalization information used while joining multiple sources. */ - @JsonProperty(value = "sourceDenormalizeInfo") private Object sourceDenormalizeInfo; /** @@ -163,4 +162,53 @@ public void validate() { attributeMappingInfo().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("targetEntityName", this.targetEntityName); + jsonWriter.writeStringField("sourceEntityName", this.sourceEntityName); + jsonWriter.writeJsonField("sourceConnectionReference", this.sourceConnectionReference); + jsonWriter.writeJsonField("attributeMappingInfo", this.attributeMappingInfo); + jsonWriter.writeUntypedField("sourceDenormalizeInfo", this.sourceDenormalizeInfo); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataMapperMapping from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataMapperMapping if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DataMapperMapping. 
+ */ + public static DataMapperMapping fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataMapperMapping deserializedDataMapperMapping = new DataMapperMapping(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("targetEntityName".equals(fieldName)) { + deserializedDataMapperMapping.targetEntityName = reader.getString(); + } else if ("sourceEntityName".equals(fieldName)) { + deserializedDataMapperMapping.sourceEntityName = reader.getString(); + } else if ("sourceConnectionReference".equals(fieldName)) { + deserializedDataMapperMapping.sourceConnectionReference + = MapperConnectionReference.fromJson(reader); + } else if ("attributeMappingInfo".equals(fieldName)) { + deserializedDataMapperMapping.attributeMappingInfo = MapperAttributeMappings.fromJson(reader); + } else if ("sourceDenormalizeInfo".equals(fieldName)) { + deserializedDataMapperMapping.sourceDenormalizeInfo = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDataMapperMapping; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java index f446b726159a..2405a70c29af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksNotebookActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.DatabricksNotebookActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * DatabricksNotebook activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DatabricksNotebookActivity.class, - visible = true) -@JsonTypeName("DatabricksNotebook") @Fluent public final class DatabricksNotebookActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DatabricksNotebook"; /* * Databricks Notebook activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private DatabricksNotebookActivityTypeProperties innerTypeProperties = new DatabricksNotebookActivityTypeProperties(); @@ -227,4 +219,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksNotebookActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksNotebookActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatabricksNotebookActivity if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksNotebookActivity. 
+ */ + public static DatabricksNotebookActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatabricksNotebookActivity deserializedDatabricksNotebookActivity = new DatabricksNotebookActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDatabricksNotebookActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDatabricksNotebookActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedDatabricksNotebookActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedDatabricksNotebookActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedDatabricksNotebookActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedDatabricksNotebookActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedDatabricksNotebookActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedDatabricksNotebookActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDatabricksNotebookActivity.innerTypeProperties + = DatabricksNotebookActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDatabricksNotebookActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatabricksNotebookActivity.withAdditionalProperties(additionalProperties); + + return deserializedDatabricksNotebookActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java index 2bc960e8e707..8fef739723b8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkJarActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkJarActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * DatabricksSparkJar activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DatabricksSparkJarActivity.class, - visible = true) -@JsonTypeName("DatabricksSparkJar") @Fluent public final class DatabricksSparkJarActivity extends ExecutionActivity { /* * Type of activity. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DatabricksSparkJar"; /* * Databricks SparkJar activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private DatabricksSparkJarActivityTypeProperties innerTypeProperties = new DatabricksSparkJarActivityTypeProperties(); @@ -225,4 +217,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksSparkJarActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksSparkJarActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatabricksSparkJarActivity if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksSparkJarActivity. + */ + public static DatabricksSparkJarActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatabricksSparkJarActivity deserializedDatabricksSparkJarActivity = new DatabricksSparkJarActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedDatabricksSparkJarActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedDatabricksSparkJarActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedDatabricksSparkJarActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedDatabricksSparkJarActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.innerTypeProperties + = 
DatabricksSparkJarActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDatabricksSparkJarActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatabricksSparkJarActivity.withAdditionalProperties(additionalProperties); + + return deserializedDatabricksSparkJarActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java index fd2ddf33ba75..0d22b6992e91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatabricksSparkPythonActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkPythonActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * DatabricksSparkPython activity. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DatabricksSparkPythonActivity.class, - visible = true) -@JsonTypeName("DatabricksSparkPython") @Fluent public final class DatabricksSparkPythonActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DatabricksSparkPython"; /* * Databricks SparkPython activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private DatabricksSparkPythonActivityTypeProperties innerTypeProperties = new DatabricksSparkPythonActivityTypeProperties(); @@ -225,4 +217,87 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatabricksSparkPythonActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatabricksSparkPythonActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatabricksSparkPythonActivity if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatabricksSparkPythonActivity. 
+     */
+    public static DatabricksSparkPythonActivity fromJson(JsonReader jsonReader) throws IOException {
+        return jsonReader.readObject(reader -> {
+            DatabricksSparkPythonActivity deserializedDatabricksSparkPythonActivity
+                = new DatabricksSparkPythonActivity();
+            Map<String, Object> additionalProperties = null;
+            while (reader.nextToken() != JsonToken.END_OBJECT) {
+                String fieldName = reader.getFieldName();
+                reader.nextToken();
+
+                if ("name".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.withName(reader.getString());
+                } else if ("description".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.withDescription(reader.getString());
+                } else if ("state".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.withState(ActivityState.fromString(reader.getString()));
+                } else if ("onInactiveMarkAs".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity
+                        .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString()));
+                } else if ("dependsOn".equals(fieldName)) {
+                    List<ActivityDependency> dependsOn
+                        = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1));
+                    deserializedDatabricksSparkPythonActivity.withDependsOn(dependsOn);
+                } else if ("userProperties".equals(fieldName)) {
+                    List<UserProperty> userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1));
+                    deserializedDatabricksSparkPythonActivity.withUserProperties(userProperties);
+                } else if ("linkedServiceName".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity
+                        .withLinkedServiceName(LinkedServiceReference.fromJson(reader));
+                } else if ("policy".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.withPolicy(ActivityPolicy.fromJson(reader));
+                } else if ("typeProperties".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.innerTypeProperties
+                        = DatabricksSparkPythonActivityTypeProperties.fromJson(reader);
+                } else if ("type".equals(fieldName)) {
+                    deserializedDatabricksSparkPythonActivity.type = reader.getString();
+                } else {
+ if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatabricksSparkPythonActivity.withAdditionalProperties(additionalProperties); + + return deserializedDatabricksSparkPythonActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java index 6d3f6e8d22e6..0bed202f73a3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Dataset.java @@ -6,15 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -23,172 +19,54 @@ * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, * folders, and documents. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Dataset.class, visible = true) -@JsonTypeName("Dataset") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AmazonS3Object", value = AmazonS3Dataset.class), - @JsonSubTypes.Type(name = "Avro", value = AvroDataset.class), - @JsonSubTypes.Type(name = "Excel", value = ExcelDataset.class), - @JsonSubTypes.Type(name = "Parquet", value = ParquetDataset.class), - @JsonSubTypes.Type(name = "DelimitedText", value = DelimitedTextDataset.class), - @JsonSubTypes.Type(name = "Json", value = JsonDataset.class), - @JsonSubTypes.Type(name = "Xml", value = XmlDataset.class), - @JsonSubTypes.Type(name = "Orc", value = OrcDataset.class), - @JsonSubTypes.Type(name = "Binary", value = BinaryDataset.class), - @JsonSubTypes.Type(name = "AzureBlob", value = AzureBlobDataset.class), - @JsonSubTypes.Type(name = "AzureTable", value = AzureTableDataset.class), - @JsonSubTypes.Type(name = "AzureSqlTable", value = AzureSqlTableDataset.class), - @JsonSubTypes.Type(name = "AzureSqlMITable", value = AzureSqlMITableDataset.class), - @JsonSubTypes.Type(name = "AzureSqlDWTable", value = AzureSqlDWTableDataset.class), - @JsonSubTypes.Type(name = "CassandraTable", value = CassandraTableDataset.class), - @JsonSubTypes.Type(name = "CustomDataset", value = CustomDataset.class), - @JsonSubTypes.Type(name = "CosmosDbSqlApiCollection", value = CosmosDbSqlApiCollectionDataset.class), - @JsonSubTypes.Type(name = "DocumentDbCollection", value = DocumentDbCollectionDataset.class), - @JsonSubTypes.Type(name = "DynamicsEntity", value = DynamicsEntityDataset.class), - @JsonSubTypes.Type(name = "DynamicsCrmEntity", value = DynamicsCrmEntityDataset.class), - @JsonSubTypes.Type(name = "CommonDataServiceForAppsEntity", value = CommonDataServiceForAppsEntityDataset.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreFile", value = AzureDataLakeStoreDataset.class), - @JsonSubTypes.Type(name = "AzureBlobFSFile", value = AzureBlobFSDataset.class), 
- @JsonSubTypes.Type(name = "Office365Table", value = Office365Dataset.class), - @JsonSubTypes.Type(name = "FileShare", value = FileShareDataset.class), - @JsonSubTypes.Type(name = "MongoDbCollection", value = MongoDbCollectionDataset.class), - @JsonSubTypes.Type(name = "MongoDbAtlasCollection", value = MongoDbAtlasCollectionDataset.class), - @JsonSubTypes.Type(name = "MongoDbV2Collection", value = MongoDbV2CollectionDataset.class), - @JsonSubTypes.Type(name = "CosmosDbMongoDbApiCollection", value = CosmosDbMongoDbApiCollectionDataset.class), - @JsonSubTypes.Type(name = "ODataResource", value = ODataResourceDataset.class), - @JsonSubTypes.Type(name = "OracleTable", value = OracleTableDataset.class), - @JsonSubTypes.Type(name = "AmazonRdsForOracleTable", value = AmazonRdsForOracleTableDataset.class), - @JsonSubTypes.Type(name = "TeradataTable", value = TeradataTableDataset.class), - @JsonSubTypes.Type(name = "AzureMySqlTable", value = AzureMySqlTableDataset.class), - @JsonSubTypes.Type(name = "AmazonRedshiftTable", value = AmazonRedshiftTableDataset.class), - @JsonSubTypes.Type(name = "Db2Table", value = Db2TableDataset.class), - @JsonSubTypes.Type(name = "RelationalTable", value = RelationalTableDataset.class), - @JsonSubTypes.Type(name = "InformixTable", value = InformixTableDataset.class), - @JsonSubTypes.Type(name = "OdbcTable", value = OdbcTableDataset.class), - @JsonSubTypes.Type(name = "MySqlTable", value = MySqlTableDataset.class), - @JsonSubTypes.Type(name = "PostgreSqlTable", value = PostgreSqlTableDataset.class), - @JsonSubTypes.Type(name = "PostgreSqlV2Table", value = PostgreSqlV2TableDataset.class), - @JsonSubTypes.Type(name = "MicrosoftAccessTable", value = MicrosoftAccessTableDataset.class), - @JsonSubTypes.Type(name = "SalesforceObject", value = SalesforceObjectDataset.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloudObject", value = SalesforceServiceCloudObjectDataset.class), - @JsonSubTypes.Type(name = "SybaseTable", value = 
SybaseTableDataset.class), - @JsonSubTypes.Type(name = "SapBwCube", value = SapBwCubeDataset.class), - @JsonSubTypes.Type(name = "SapCloudForCustomerResource", value = SapCloudForCustomerResourceDataset.class), - @JsonSubTypes.Type(name = "SapEccResource", value = SapEccResourceDataset.class), - @JsonSubTypes.Type(name = "SapHanaTable", value = SapHanaTableDataset.class), - @JsonSubTypes.Type(name = "SapOpenHubTable", value = SapOpenHubTableDataset.class), - @JsonSubTypes.Type(name = "SqlServerTable", value = SqlServerTableDataset.class), - @JsonSubTypes.Type(name = "AmazonRdsForSqlServerTable", value = AmazonRdsForSqlServerTableDataset.class), - @JsonSubTypes.Type(name = "RestResource", value = RestResourceDataset.class), - @JsonSubTypes.Type(name = "SapTableResource", value = SapTableResourceDataset.class), - @JsonSubTypes.Type(name = "SapOdpResource", value = SapOdpResourceDataset.class), - @JsonSubTypes.Type(name = "WebTable", value = WebTableDataset.class), - @JsonSubTypes.Type(name = "AzureSearchIndex", value = AzureSearchIndexDataset.class), - @JsonSubTypes.Type(name = "HttpFile", value = HttpDataset.class), - @JsonSubTypes.Type(name = "AmazonMWSObject", value = AmazonMwsObjectDataset.class), - @JsonSubTypes.Type(name = "AzurePostgreSqlTable", value = AzurePostgreSqlTableDataset.class), - @JsonSubTypes.Type(name = "ConcurObject", value = ConcurObjectDataset.class), - @JsonSubTypes.Type(name = "CouchbaseTable", value = CouchbaseTableDataset.class), - @JsonSubTypes.Type(name = "DrillTable", value = DrillTableDataset.class), - @JsonSubTypes.Type(name = "EloquaObject", value = EloquaObjectDataset.class), - @JsonSubTypes.Type(name = "GoogleBigQueryObject", value = GoogleBigQueryObjectDataset.class), - @JsonSubTypes.Type(name = "GoogleBigQueryV2Object", value = GoogleBigQueryV2ObjectDataset.class), - @JsonSubTypes.Type(name = "GreenplumTable", value = GreenplumTableDataset.class), - @JsonSubTypes.Type(name = "HBaseObject", value = HBaseObjectDataset.class), - 
@JsonSubTypes.Type(name = "HiveObject", value = HiveObjectDataset.class), - @JsonSubTypes.Type(name = "HubspotObject", value = HubspotObjectDataset.class), - @JsonSubTypes.Type(name = "ImpalaObject", value = ImpalaObjectDataset.class), - @JsonSubTypes.Type(name = "JiraObject", value = JiraObjectDataset.class), - @JsonSubTypes.Type(name = "MagentoObject", value = MagentoObjectDataset.class), - @JsonSubTypes.Type(name = "MariaDBTable", value = MariaDBTableDataset.class), - @JsonSubTypes.Type(name = "AzureMariaDBTable", value = AzureMariaDBTableDataset.class), - @JsonSubTypes.Type(name = "MarketoObject", value = MarketoObjectDataset.class), - @JsonSubTypes.Type(name = "PaypalObject", value = PaypalObjectDataset.class), - @JsonSubTypes.Type(name = "PhoenixObject", value = PhoenixObjectDataset.class), - @JsonSubTypes.Type(name = "PrestoObject", value = PrestoObjectDataset.class), - @JsonSubTypes.Type(name = "QuickBooksObject", value = QuickBooksObjectDataset.class), - @JsonSubTypes.Type(name = "ServiceNowObject", value = ServiceNowObjectDataset.class), - @JsonSubTypes.Type(name = "ShopifyObject", value = ShopifyObjectDataset.class), - @JsonSubTypes.Type(name = "SparkObject", value = SparkObjectDataset.class), - @JsonSubTypes.Type(name = "SquareObject", value = SquareObjectDataset.class), - @JsonSubTypes.Type(name = "XeroObject", value = XeroObjectDataset.class), - @JsonSubTypes.Type(name = "ZohoObject", value = ZohoObjectDataset.class), - @JsonSubTypes.Type(name = "NetezzaTable", value = NetezzaTableDataset.class), - @JsonSubTypes.Type(name = "VerticaTable", value = VerticaTableDataset.class), - @JsonSubTypes.Type(name = "SalesforceMarketingCloudObject", value = SalesforceMarketingCloudObjectDataset.class), - @JsonSubTypes.Type(name = "ResponsysObject", value = ResponsysObjectDataset.class), - @JsonSubTypes.Type(name = "DynamicsAXResource", value = DynamicsAXResourceDataset.class), - @JsonSubTypes.Type(name = "OracleServiceCloudObject", value = 
OracleServiceCloudObjectDataset.class),
-    @JsonSubTypes.Type(name = "AzureDataExplorerTable", value = AzureDataExplorerTableDataset.class),
-    @JsonSubTypes.Type(name = "GoogleAdWordsObject", value = GoogleAdWordsObjectDataset.class),
-    @JsonSubTypes.Type(name = "SnowflakeTable", value = SnowflakeDataset.class),
-    @JsonSubTypes.Type(name = "SnowflakeV2Table", value = SnowflakeV2Dataset.class),
-    @JsonSubTypes.Type(name = "SharePointOnlineListResource", value = SharePointOnlineListResourceDataset.class),
-    @JsonSubTypes.Type(name = "AzureDatabricksDeltaLakeDataset", value = AzureDatabricksDeltaLakeDataset.class),
-    @JsonSubTypes.Type(name = "LakeHouseTable", value = LakeHouseTableDataset.class),
-    @JsonSubTypes.Type(name = "SalesforceV2Object", value = SalesforceV2ObjectDataset.class),
-    @JsonSubTypes.Type(name = "SalesforceServiceCloudV2Object", value = SalesforceServiceCloudV2ObjectDataset.class),
-    @JsonSubTypes.Type(name = "WarehouseTable", value = WarehouseTableDataset.class),
-    @JsonSubTypes.Type(name = "ServiceNowV2Object", value = ServiceNowV2ObjectDataset.class) })
 @Fluent
-public class Dataset {
+public class Dataset implements JsonSerializable<Dataset> {
     /*
      * Type of dataset.
      */
-    @JsonTypeId
-    @JsonProperty(value = "type", required = true)
     private String type = "Dataset";
 
     /*
      * Dataset description.
      */
-    @JsonProperty(value = "description")
     private String description;
 
     /*
      * Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType:
      * DatasetDataElement.
      */
-    @JsonProperty(value = "structure")
     private Object structure;
 
     /*
      * Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array),
      * itemType: DatasetSchemaDataElement.
      */
-    @JsonProperty(value = "schema")
     private Object schema;
 
     /*
      * Linked service reference.
      */
-    @JsonProperty(value = "linkedServiceName", required = true)
     private LinkedServiceReference linkedServiceName;
 
     /*
      * Parameters for dataset.
*/ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * List of tags that can be used for describing the Dataset. */ - @JsonProperty(value = "annotations") private List annotations; /* * The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ - @JsonProperty(value = "folder") private DatasetFolder folder; /* * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, * folders, and documents. */ - @JsonIgnore private Map additionalProperties; /** @@ -358,7 +236,6 @@ public Dataset withFolder(DatasetFolder folder) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -375,14 +252,6 @@ public Dataset withAdditionalProperties(Map additionalProperties return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -408,4 +277,307 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Dataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeUntypedField("structure", this.structure); + jsonWriter.writeUntypedField("schema", this.schema); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", this.folder); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Dataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Dataset if the JsonReader was pointing to an instance of it, or null if it was pointing to + * JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Dataset. 
+ */ + public static Dataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("AmazonS3Object".equals(discriminatorValue)) { + return AmazonS3Dataset.fromJson(readerToUse.reset()); + } else if ("Avro".equals(discriminatorValue)) { + return AvroDataset.fromJson(readerToUse.reset()); + } else if ("Excel".equals(discriminatorValue)) { + return ExcelDataset.fromJson(readerToUse.reset()); + } else if ("Parquet".equals(discriminatorValue)) { + return ParquetDataset.fromJson(readerToUse.reset()); + } else if ("DelimitedText".equals(discriminatorValue)) { + return DelimitedTextDataset.fromJson(readerToUse.reset()); + } else if ("Json".equals(discriminatorValue)) { + return JsonDataset.fromJson(readerToUse.reset()); + } else if ("Xml".equals(discriminatorValue)) { + return XmlDataset.fromJson(readerToUse.reset()); + } else if ("Orc".equals(discriminatorValue)) { + return OrcDataset.fromJson(readerToUse.reset()); + } else if ("Binary".equals(discriminatorValue)) { + return BinaryDataset.fromJson(readerToUse.reset()); + } else if ("AzureBlob".equals(discriminatorValue)) { + return AzureBlobDataset.fromJson(readerToUse.reset()); + } else if ("AzureTable".equals(discriminatorValue)) { + return AzureTableDataset.fromJson(readerToUse.reset()); + } else if ("AzureSqlTable".equals(discriminatorValue)) { + return AzureSqlTableDataset.fromJson(readerToUse.reset()); + } else if ("AzureSqlMITable".equals(discriminatorValue)) { + return 
AzureSqlMITableDataset.fromJson(readerToUse.reset()); + } else if ("AzureSqlDWTable".equals(discriminatorValue)) { + return AzureSqlDWTableDataset.fromJson(readerToUse.reset()); + } else if ("CassandraTable".equals(discriminatorValue)) { + return CassandraTableDataset.fromJson(readerToUse.reset()); + } else if ("CustomDataset".equals(discriminatorValue)) { + return CustomDataset.fromJson(readerToUse.reset()); + } else if ("CosmosDbSqlApiCollection".equals(discriminatorValue)) { + return CosmosDbSqlApiCollectionDataset.fromJson(readerToUse.reset()); + } else if ("DocumentDbCollection".equals(discriminatorValue)) { + return DocumentDbCollectionDataset.fromJson(readerToUse.reset()); + } else if ("DynamicsEntity".equals(discriminatorValue)) { + return DynamicsEntityDataset.fromJson(readerToUse.reset()); + } else if ("DynamicsCrmEntity".equals(discriminatorValue)) { + return DynamicsCrmEntityDataset.fromJson(readerToUse.reset()); + } else if ("CommonDataServiceForAppsEntity".equals(discriminatorValue)) { + return CommonDataServiceForAppsEntityDataset.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreFile".equals(discriminatorValue)) { + return AzureDataLakeStoreDataset.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSFile".equals(discriminatorValue)) { + return AzureBlobFSDataset.fromJson(readerToUse.reset()); + } else if ("Office365Table".equals(discriminatorValue)) { + return Office365Dataset.fromJson(readerToUse.reset()); + } else if ("FileShare".equals(discriminatorValue)) { + return FileShareDataset.fromJson(readerToUse.reset()); + } else if ("MongoDbCollection".equals(discriminatorValue)) { + return MongoDbCollectionDataset.fromJson(readerToUse.reset()); + } else if ("MongoDbAtlasCollection".equals(discriminatorValue)) { + return MongoDbAtlasCollectionDataset.fromJson(readerToUse.reset()); + } else if ("MongoDbV2Collection".equals(discriminatorValue)) { + return MongoDbV2CollectionDataset.fromJson(readerToUse.reset()); + } else if 
("CosmosDbMongoDbApiCollection".equals(discriminatorValue)) { + return CosmosDbMongoDbApiCollectionDataset.fromJson(readerToUse.reset()); + } else if ("ODataResource".equals(discriminatorValue)) { + return ODataResourceDataset.fromJson(readerToUse.reset()); + } else if ("OracleTable".equals(discriminatorValue)) { + return OracleTableDataset.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForOracleTable".equals(discriminatorValue)) { + return AmazonRdsForOracleTableDataset.fromJson(readerToUse.reset()); + } else if ("TeradataTable".equals(discriminatorValue)) { + return TeradataTableDataset.fromJson(readerToUse.reset()); + } else if ("AzureMySqlTable".equals(discriminatorValue)) { + return AzureMySqlTableDataset.fromJson(readerToUse.reset()); + } else if ("AmazonRedshiftTable".equals(discriminatorValue)) { + return AmazonRedshiftTableDataset.fromJson(readerToUse.reset()); + } else if ("Db2Table".equals(discriminatorValue)) { + return Db2TableDataset.fromJson(readerToUse.reset()); + } else if ("RelationalTable".equals(discriminatorValue)) { + return RelationalTableDataset.fromJson(readerToUse.reset()); + } else if ("InformixTable".equals(discriminatorValue)) { + return InformixTableDataset.fromJson(readerToUse.reset()); + } else if ("OdbcTable".equals(discriminatorValue)) { + return OdbcTableDataset.fromJson(readerToUse.reset()); + } else if ("MySqlTable".equals(discriminatorValue)) { + return MySqlTableDataset.fromJson(readerToUse.reset()); + } else if ("PostgreSqlTable".equals(discriminatorValue)) { + return PostgreSqlTableDataset.fromJson(readerToUse.reset()); + } else if ("PostgreSqlV2Table".equals(discriminatorValue)) { + return PostgreSqlV2TableDataset.fromJson(readerToUse.reset()); + } else if ("MicrosoftAccessTable".equals(discriminatorValue)) { + return MicrosoftAccessTableDataset.fromJson(readerToUse.reset()); + } else if ("SalesforceObject".equals(discriminatorValue)) { + return SalesforceObjectDataset.fromJson(readerToUse.reset()); + } else if 
("SalesforceServiceCloudObject".equals(discriminatorValue)) { + return SalesforceServiceCloudObjectDataset.fromJson(readerToUse.reset()); + } else if ("SybaseTable".equals(discriminatorValue)) { + return SybaseTableDataset.fromJson(readerToUse.reset()); + } else if ("SapBwCube".equals(discriminatorValue)) { + return SapBwCubeDataset.fromJson(readerToUse.reset()); + } else if ("SapCloudForCustomerResource".equals(discriminatorValue)) { + return SapCloudForCustomerResourceDataset.fromJson(readerToUse.reset()); + } else if ("SapEccResource".equals(discriminatorValue)) { + return SapEccResourceDataset.fromJson(readerToUse.reset()); + } else if ("SapHanaTable".equals(discriminatorValue)) { + return SapHanaTableDataset.fromJson(readerToUse.reset()); + } else if ("SapOpenHubTable".equals(discriminatorValue)) { + return SapOpenHubTableDataset.fromJson(readerToUse.reset()); + } else if ("SqlServerTable".equals(discriminatorValue)) { + return SqlServerTableDataset.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForSqlServerTable".equals(discriminatorValue)) { + return AmazonRdsForSqlServerTableDataset.fromJson(readerToUse.reset()); + } else if ("RestResource".equals(discriminatorValue)) { + return RestResourceDataset.fromJson(readerToUse.reset()); + } else if ("SapTableResource".equals(discriminatorValue)) { + return SapTableResourceDataset.fromJson(readerToUse.reset()); + } else if ("SapOdpResource".equals(discriminatorValue)) { + return SapOdpResourceDataset.fromJson(readerToUse.reset()); + } else if ("WebTable".equals(discriminatorValue)) { + return WebTableDataset.fromJson(readerToUse.reset()); + } else if ("AzureSearchIndex".equals(discriminatorValue)) { + return AzureSearchIndexDataset.fromJson(readerToUse.reset()); + } else if ("HttpFile".equals(discriminatorValue)) { + return HttpDataset.fromJson(readerToUse.reset()); + } else if ("AmazonMWSObject".equals(discriminatorValue)) { + return AmazonMwsObjectDataset.fromJson(readerToUse.reset()); + } else if 
("AzurePostgreSqlTable".equals(discriminatorValue)) { + return AzurePostgreSqlTableDataset.fromJson(readerToUse.reset()); + } else if ("ConcurObject".equals(discriminatorValue)) { + return ConcurObjectDataset.fromJson(readerToUse.reset()); + } else if ("CouchbaseTable".equals(discriminatorValue)) { + return CouchbaseTableDataset.fromJson(readerToUse.reset()); + } else if ("DrillTable".equals(discriminatorValue)) { + return DrillTableDataset.fromJson(readerToUse.reset()); + } else if ("EloquaObject".equals(discriminatorValue)) { + return EloquaObjectDataset.fromJson(readerToUse.reset()); + } else if ("GoogleBigQueryObject".equals(discriminatorValue)) { + return GoogleBigQueryObjectDataset.fromJson(readerToUse.reset()); + } else if ("GoogleBigQueryV2Object".equals(discriminatorValue)) { + return GoogleBigQueryV2ObjectDataset.fromJson(readerToUse.reset()); + } else if ("GreenplumTable".equals(discriminatorValue)) { + return GreenplumTableDataset.fromJson(readerToUse.reset()); + } else if ("HBaseObject".equals(discriminatorValue)) { + return HBaseObjectDataset.fromJson(readerToUse.reset()); + } else if ("HiveObject".equals(discriminatorValue)) { + return HiveObjectDataset.fromJson(readerToUse.reset()); + } else if ("HubspotObject".equals(discriminatorValue)) { + return HubspotObjectDataset.fromJson(readerToUse.reset()); + } else if ("ImpalaObject".equals(discriminatorValue)) { + return ImpalaObjectDataset.fromJson(readerToUse.reset()); + } else if ("JiraObject".equals(discriminatorValue)) { + return JiraObjectDataset.fromJson(readerToUse.reset()); + } else if ("MagentoObject".equals(discriminatorValue)) { + return MagentoObjectDataset.fromJson(readerToUse.reset()); + } else if ("MariaDBTable".equals(discriminatorValue)) { + return MariaDBTableDataset.fromJson(readerToUse.reset()); + } else if ("AzureMariaDBTable".equals(discriminatorValue)) { + return AzureMariaDBTableDataset.fromJson(readerToUse.reset()); + } else if ("MarketoObject".equals(discriminatorValue)) { + 
return MarketoObjectDataset.fromJson(readerToUse.reset()); + } else if ("PaypalObject".equals(discriminatorValue)) { + return PaypalObjectDataset.fromJson(readerToUse.reset()); + } else if ("PhoenixObject".equals(discriminatorValue)) { + return PhoenixObjectDataset.fromJson(readerToUse.reset()); + } else if ("PrestoObject".equals(discriminatorValue)) { + return PrestoObjectDataset.fromJson(readerToUse.reset()); + } else if ("QuickBooksObject".equals(discriminatorValue)) { + return QuickBooksObjectDataset.fromJson(readerToUse.reset()); + } else if ("ServiceNowObject".equals(discriminatorValue)) { + return ServiceNowObjectDataset.fromJson(readerToUse.reset()); + } else if ("ShopifyObject".equals(discriminatorValue)) { + return ShopifyObjectDataset.fromJson(readerToUse.reset()); + } else if ("SparkObject".equals(discriminatorValue)) { + return SparkObjectDataset.fromJson(readerToUse.reset()); + } else if ("SquareObject".equals(discriminatorValue)) { + return SquareObjectDataset.fromJson(readerToUse.reset()); + } else if ("XeroObject".equals(discriminatorValue)) { + return XeroObjectDataset.fromJson(readerToUse.reset()); + } else if ("ZohoObject".equals(discriminatorValue)) { + return ZohoObjectDataset.fromJson(readerToUse.reset()); + } else if ("NetezzaTable".equals(discriminatorValue)) { + return NetezzaTableDataset.fromJson(readerToUse.reset()); + } else if ("VerticaTable".equals(discriminatorValue)) { + return VerticaTableDataset.fromJson(readerToUse.reset()); + } else if ("SalesforceMarketingCloudObject".equals(discriminatorValue)) { + return SalesforceMarketingCloudObjectDataset.fromJson(readerToUse.reset()); + } else if ("ResponsysObject".equals(discriminatorValue)) { + return ResponsysObjectDataset.fromJson(readerToUse.reset()); + } else if ("DynamicsAXResource".equals(discriminatorValue)) { + return DynamicsAXResourceDataset.fromJson(readerToUse.reset()); + } else if ("OracleServiceCloudObject".equals(discriminatorValue)) { + return 
OracleServiceCloudObjectDataset.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorerTable".equals(discriminatorValue)) { + return AzureDataExplorerTableDataset.fromJson(readerToUse.reset()); + } else if ("GoogleAdWordsObject".equals(discriminatorValue)) { + return GoogleAdWordsObjectDataset.fromJson(readerToUse.reset()); + } else if ("SnowflakeTable".equals(discriminatorValue)) { + return SnowflakeDataset.fromJson(readerToUse.reset()); + } else if ("SnowflakeV2Table".equals(discriminatorValue)) { + return SnowflakeV2Dataset.fromJson(readerToUse.reset()); + } else if ("SharePointOnlineListResource".equals(discriminatorValue)) { + return SharePointOnlineListResourceDataset.fromJson(readerToUse.reset()); + } else if ("AzureDatabricksDeltaLakeDataset".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeDataset.fromJson(readerToUse.reset()); + } else if ("LakeHouseTable".equals(discriminatorValue)) { + return LakeHouseTableDataset.fromJson(readerToUse.reset()); + } else if ("SalesforceV2Object".equals(discriminatorValue)) { + return SalesforceV2ObjectDataset.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudV2Object".equals(discriminatorValue)) { + return SalesforceServiceCloudV2ObjectDataset.fromJson(readerToUse.reset()); + } else if ("WarehouseTable".equals(discriminatorValue)) { + return WarehouseTableDataset.fromJson(readerToUse.reset()); + } else if ("ServiceNowV2Object".equals(discriminatorValue)) { + return ServiceNowV2ObjectDataset.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static Dataset fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Dataset deserializedDataset = new Dataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { 
+ deserializedDataset.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDataset.type = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedDataset.description = reader.getString(); + } else if ("structure".equals(fieldName)) { + deserializedDataset.structure = reader.readUntyped(); + } else if ("schema".equals(fieldName)) { + deserializedDataset.schema = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDataset.parameters = parameters; + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDataset.annotations = annotations; + } else if ("folder".equals(fieldName)) { + deserializedDataset.folder = DatasetFolder.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataset.additionalProperties = additionalProperties; + + return deserializedDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java index 12e3fca73914..b47e0efc8cd9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetCompression.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import 
com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -17,23 +18,20 @@ * The compression method used on a dataset. */ @Fluent -public final class DatasetCompression { +public final class DatasetCompression implements JsonSerializable { /* * Type of dataset compression. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "type", required = true) private Object type; /* * The dataset compression level. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "level") private Object level; /* * The compression method used on a dataset. */ - @JsonIgnore private Map additionalProperties; /** @@ -87,7 +85,6 @@ public DatasetCompression withLevel(Object level) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -103,14 +100,6 @@ public DatasetCompression withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -124,4 +113,55 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatasetCompression.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("type", this.type); + jsonWriter.writeUntypedField("level", this.level); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetCompression from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetCompression if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatasetCompression. 
+ */ + public static DatasetCompression fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetCompression deserializedDatasetCompression = new DatasetCompression(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDatasetCompression.type = reader.readUntyped(); + } else if ("level".equals(fieldName)) { + deserializedDatasetCompression.level = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatasetCompression.additionalProperties = additionalProperties; + + return deserializedDatasetCompression; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java index c8cabc573c4e..8720bf0f9956 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetDebugResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Dataset debug resource. @@ -16,7 +19,6 @@ public final class DatasetDebugResource extends SubResourceDebugResource { /* * Dataset properties. 
*/ - @JsonProperty(value = "properties", required = true) private Dataset properties; /** @@ -72,4 +74,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatasetDebugResource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetDebugResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetDebugResource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatasetDebugResource. + */ + public static DatasetDebugResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetDebugResource deserializedDatasetDebugResource = new DatasetDebugResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDatasetDebugResource.withName(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedDatasetDebugResource.properties = Dataset.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedDatasetDebugResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetFolder.java index 4f1a4104db21..2c16b6a588e3 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetFolder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetFolder.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ @Fluent -public final class DatasetFolder { +public final class DatasetFolder implements JsonSerializable { /* * The name of the folder that this Dataset is in. */ - @JsonProperty(value = "name") private String name; /** @@ -51,4 +54,40 @@ public DatasetFolder withName(String name) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetFolder from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetFolder if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DatasetFolder. 
+ */ + public static DatasetFolder fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetFolder deserializedDatasetFolder = new DatasetFolder(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDatasetFolder.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDatasetFolder; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java index abbf13e5673f..e4314ebeb644 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DatasetResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of dataset resources. */ @Fluent -public final class DatasetListResponse { +public final class DatasetListResponse implements JsonSerializable { /* * List of datasets. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. 
*/ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatasetListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetListResponse if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatasetListResponse. 
+ */ + public static DatasetListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetListResponse deserializedDatasetListResponse = new DatasetListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> DatasetResourceInner.fromJson(reader1)); + deserializedDatasetListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedDatasetListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDatasetListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java index a6fccdf29fc6..bab161561f06 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetLocation.java @@ -5,62 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Dataset location. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DatasetLocation.class, visible = true) -@JsonTypeName("DatasetLocation") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AzureBlobStorageLocation", value = AzureBlobStorageLocation.class), - @JsonSubTypes.Type(name = "AzureBlobFSLocation", value = AzureBlobFSLocation.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreLocation", value = AzureDataLakeStoreLocation.class), - @JsonSubTypes.Type(name = "AmazonS3Location", value = AmazonS3Location.class), - @JsonSubTypes.Type(name = "FileServerLocation", value = FileServerLocation.class), - @JsonSubTypes.Type(name = "AzureFileStorageLocation", value = AzureFileStorageLocation.class), - @JsonSubTypes.Type(name = "AmazonS3CompatibleLocation", value = AmazonS3CompatibleLocation.class), - @JsonSubTypes.Type(name = "OracleCloudStorageLocation", value = OracleCloudStorageLocation.class), - @JsonSubTypes.Type(name = "GoogleCloudStorageLocation", value = GoogleCloudStorageLocation.class), - @JsonSubTypes.Type(name = "FtpServerLocation", value = FtpServerLocation.class), - @JsonSubTypes.Type(name = "SftpLocation", value = SftpLocation.class), - @JsonSubTypes.Type(name = "HttpServerLocation", value = HttpServerLocation.class), - @JsonSubTypes.Type(name = "HdfsLocation", value = HdfsLocation.class), - @JsonSubTypes.Type(name = "LakeHouseLocation", value = LakeHouseLocation.class) }) @Fluent -public class DatasetLocation { +public class DatasetLocation implements JsonSerializable { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DatasetLocation"; /* * Specify the folder path of dataset. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "folderPath") private Object folderPath; /* * Specify the file name of dataset. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileName") private Object fileName; /* * Dataset location. */ - @JsonIgnore private Map additionalProperties; /** @@ -125,7 +100,6 @@ public DatasetLocation withFileName(Object fileName) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -141,14 +115,6 @@ public DatasetLocation withAdditionalProperties(Map additionalPr return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -156,4 +122,108 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("folderPath", this.folderPath); + jsonWriter.writeUntypedField("fileName", this.fileName); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DatasetLocation. 
+ */ + public static DatasetLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("AzureBlobStorageLocation".equals(discriminatorValue)) { + return AzureBlobStorageLocation.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSLocation".equals(discriminatorValue)) { + return AzureBlobFSLocation.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreLocation".equals(discriminatorValue)) { + return AzureDataLakeStoreLocation.fromJson(readerToUse.reset()); + } else if ("AmazonS3Location".equals(discriminatorValue)) { + return AmazonS3Location.fromJson(readerToUse.reset()); + } else if ("FileServerLocation".equals(discriminatorValue)) { + return FileServerLocation.fromJson(readerToUse.reset()); + } else if ("AzureFileStorageLocation".equals(discriminatorValue)) { + return AzureFileStorageLocation.fromJson(readerToUse.reset()); + } else if ("AmazonS3CompatibleLocation".equals(discriminatorValue)) { + return AmazonS3CompatibleLocation.fromJson(readerToUse.reset()); + } else if ("OracleCloudStorageLocation".equals(discriminatorValue)) { + return OracleCloudStorageLocation.fromJson(readerToUse.reset()); + } else if ("GoogleCloudStorageLocation".equals(discriminatorValue)) { + return GoogleCloudStorageLocation.fromJson(readerToUse.reset()); + } else if ("FtpServerLocation".equals(discriminatorValue)) { + return FtpServerLocation.fromJson(readerToUse.reset()); + } else if ("SftpLocation".equals(discriminatorValue)) 
{ + return SftpLocation.fromJson(readerToUse.reset()); + } else if ("HttpServerLocation".equals(discriminatorValue)) { + return HttpServerLocation.fromJson(readerToUse.reset()); + } else if ("HdfsLocation".equals(discriminatorValue)) { + return HdfsLocation.fromJson(readerToUse.reset()); + } else if ("LakeHouseLocation".equals(discriminatorValue)) { + return LakeHouseLocation.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static DatasetLocation fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetLocation deserializedDatasetLocation = new DatasetLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDatasetLocation.type = reader.getString(); + } else if ("folderPath".equals(fieldName)) { + deserializedDatasetLocation.folderPath = reader.readUntyped(); + } else if ("fileName".equals(fieldName)) { + deserializedDatasetLocation.fileName = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatasetLocation.additionalProperties = additionalProperties; + + return deserializedDatasetLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java index d5780bf1dbc2..d80a1f467041 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetReference.java @@ -6,32 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; /** * Dataset reference type. */ @Fluent -public final class DatasetReference { +public final class DatasetReference implements JsonSerializable { /* * Dataset reference type. */ - @JsonProperty(value = "type", required = true) private String type = "DatasetReference"; /* * Reference dataset name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Arguments for dataset. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /** @@ -113,4 +112,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DatasetReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("referenceName", this.referenceName); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DatasetReference. + */ + public static DatasetReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetReference deserializedDatasetReference = new DatasetReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceName".equals(fieldName)) { + deserializedDatasetReference.referenceName = reader.getString(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedDatasetReference.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedDatasetReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetSchemaDataElement.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetSchemaDataElement.java index 8fdb0798252b..8055b14a1ccc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetSchemaDataElement.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetSchemaDataElement.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; 
+import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,23 +17,20 @@ * Columns that define the physical type schema of the dataset. */ @Fluent -public final class DatasetSchemaDataElement { +public final class DatasetSchemaDataElement implements JsonSerializable { /* * Name of the schema column. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "name") private Object name; /* * Type of the schema column. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "type") private Object type; /* * Columns that define the physical type schema of the dataset. */ - @JsonIgnore private Map additionalProperties; /** @@ -86,7 +84,6 @@ public DatasetSchemaDataElement withType(Object type) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -102,14 +99,6 @@ public DatasetSchemaDataElement withAdditionalProperties(Map add return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -117,4 +106,54 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("name", this.name); + jsonWriter.writeUntypedField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetSchemaDataElement from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DatasetSchemaDataElement if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DatasetSchemaDataElement. + */ + public static DatasetSchemaDataElement fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetSchemaDataElement deserializedDatasetSchemaDataElement = new DatasetSchemaDataElement(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDatasetSchemaDataElement.name = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedDatasetSchemaDataElement.type = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatasetSchemaDataElement.additionalProperties = additionalProperties; + + return deserializedDatasetSchemaDataElement; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java index b8146426b483..27e42e7e0a29 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DatasetStorageFormat.java @@ -5,53 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; 
-import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * The format definition of a storage. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DatasetStorageFormat.class, visible = true) -@JsonTypeName("DatasetStorageFormat") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "TextFormat", value = TextFormat.class), - @JsonSubTypes.Type(name = "JsonFormat", value = JsonFormat.class), - @JsonSubTypes.Type(name = "AvroFormat", value = AvroFormat.class), - @JsonSubTypes.Type(name = "OrcFormat", value = OrcFormat.class), - @JsonSubTypes.Type(name = "ParquetFormat", value = ParquetFormat.class) }) @Fluent -public class DatasetStorageFormat { +public class DatasetStorageFormat implements JsonSerializable { /* * Type of dataset storage format. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DatasetStorageFormat"; /* * Serializer. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "serializer") private Object serializer; /* * Deserializer. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "deserializer") private Object deserializer; /* * The format definition of a storage. */ - @JsonIgnore private Map additionalProperties; /** @@ -114,7 +98,6 @@ public DatasetStorageFormat withDeserializer(Object deserializer) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -130,14 +113,6 @@ public DatasetStorageFormat withAdditionalProperties(Map additio return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -145,4 +120,90 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("serializer", this.serializer); + jsonWriter.writeUntypedField("deserializer", this.deserializer); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DatasetStorageFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DatasetStorageFormat if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the DatasetStorageFormat. 
+ */ + public static DatasetStorageFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("TextFormat".equals(discriminatorValue)) { + return TextFormat.fromJson(readerToUse.reset()); + } else if ("JsonFormat".equals(discriminatorValue)) { + return JsonFormat.fromJson(readerToUse.reset()); + } else if ("AvroFormat".equals(discriminatorValue)) { + return AvroFormat.fromJson(readerToUse.reset()); + } else if ("OrcFormat".equals(discriminatorValue)) { + return OrcFormat.fromJson(readerToUse.reset()); + } else if ("ParquetFormat".equals(discriminatorValue)) { + return ParquetFormat.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static DatasetStorageFormat fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DatasetStorageFormat deserializedDatasetStorageFormat = new DatasetStorageFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDatasetStorageFormat.type = reader.getString(); + } else if ("serializer".equals(fieldName)) { + deserializedDatasetStorageFormat.serializer = reader.readUntyped(); + } else if ("deserializer".equals(fieldName)) { + deserializedDatasetStorageFormat.deserializer = reader.readUntyped(); + } else { + if 
(additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDatasetStorageFormat.additionalProperties = additionalProperties; + + return deserializedDatasetStorageFormat; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java index 300d71ba6f03..e341b83212ef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DataworldLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DataworldLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Dataworld. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DataworldLinkedService.class, visible = true) -@JsonTypeName("Dataworld") @Fluent public final class DataworldLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Dataworld"; /* * Dataworld linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DataworldLinkedServiceTypeProperties innerTypeProperties = new DataworldLinkedServiceTypeProperties(); /** @@ -161,4 +157,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DataworldLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DataworldLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DataworldLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DataworldLinkedService. 
+ */ + public static DataworldLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DataworldLinkedService deserializedDataworldLinkedService = new DataworldLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDataworldLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDataworldLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDataworldLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDataworldLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDataworldLinkedService.innerTypeProperties + = DataworldLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDataworldLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDataworldLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDataworldLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DayOfWeek.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DayOfWeek.java index aa15197bfda5..cfadfe7d51e7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DayOfWeek.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DayOfWeek.java @@ -4,9 +4,6 @@ package com.azure.resourcemanager.datafactory.models; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; - /** * The days of the week. */ @@ -61,7 +58,6 @@ public enum DayOfWeek { * @param value the serialized value to parse. * @return the parsed DayOfWeek object, or null if unable to parse. */ - @JsonCreator public static DayOfWeek fromString(String value) { if (value == null) { return null; @@ -78,7 +74,6 @@ public static DayOfWeek fromString(String value) { /** * {@inheritDoc} */ - @JsonValue @Override public String toString() { return this.value; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DaysOfWeek.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DaysOfWeek.java index 361749b122fd..7c8e30649de0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DaysOfWeek.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DaysOfWeek.java @@ -4,9 +4,6 @@ package com.azure.resourcemanager.datafactory.models; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; - /** * Defines values for DaysOfWeek. */ @@ -61,7 +58,6 @@ public enum DaysOfWeek { * @param value the serialized value to parse. * @return the parsed DaysOfWeek object, or null if unable to parse. 
*/ - @JsonCreator public static DaysOfWeek fromString(String value) { if (value == null) { return null; @@ -78,7 +74,6 @@ public static DaysOfWeek fromString(String value) { /** * {@inheritDoc} */ - @JsonValue @Override public String toString() { return this.value; diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2AuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2AuthenticationType.java index 0155d507a90c..db3b74e525d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2AuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2AuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public Db2AuthenticationType() { * @param name a name to look for. * @return the corresponding Db2AuthenticationType. 
*/ - @JsonCreator public static Db2AuthenticationType fromString(String name) { return fromString(name, Db2AuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java index cd8e3bc17607..4f331fe716b6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2LinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.Db2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for DB2 data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2LinkedService.class, visible = true) -@JsonTypeName("Db2") @Fluent public final class Db2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Db2"; /* * DB2 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private Db2LinkedServiceTypeProperties innerTypeProperties = new Db2LinkedServiceTypeProperties(); /** @@ -340,4 +336,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Db2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Db2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Db2LinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Db2LinkedService. 
+ */ + public static Db2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Db2LinkedService deserializedDb2LinkedService = new Db2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDb2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDb2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDb2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDb2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDb2LinkedService.innerTypeProperties = Db2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDb2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDb2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDb2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java index e2bf5f5c6fd7..aae51e584aa5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2Source.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Db2 databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2Source.class, visible = true) -@JsonTypeName("Db2Source") @Fluent public final class Db2Source extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Db2Source"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public Db2Source withDisableMetricsCollection(Object disableMetricsCollection) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Db2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Db2Source if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the Db2Source. 
+ */ + public static Db2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Db2Source deserializedDb2Source = new Db2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDb2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDb2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDb2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDb2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedDb2Source.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDb2Source.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDb2Source.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDb2Source.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDb2Source.withAdditionalProperties(additionalProperties); + + return deserializedDb2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java index 9b67b97d816e..7114c1ed372d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Db2TableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.Db2TableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Db2 table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Db2TableDataset.class, visible = true) -@JsonTypeName("Db2Table") @Fluent public final class Db2TableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Db2Table"; /* * Db2 table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private Db2TableDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Db2TableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Db2TableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Db2TableDataset. 
+ */ + public static Db2TableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Db2TableDataset deserializedDb2TableDataset = new Db2TableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDb2TableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDb2TableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDb2TableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDb2TableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDb2TableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDb2TableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDb2TableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedDb2TableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedDb2TableDataset.innerTypeProperties = Db2TableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDb2TableDataset.withAdditionalProperties(additionalProperties); + + return deserializedDb2TableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java index 25ebfa5d3ed4..21fd91376dcd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DeleteActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Delete activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DeleteActivity.class, visible = true) -@JsonTypeName("Delete") @Fluent public final class DeleteActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Delete"; /* * Delete activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DeleteActivityTypeProperties innerTypeProperties = new DeleteActivityTypeProperties(); /** @@ -292,4 +289,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DeleteActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DeleteActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DeleteActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DeleteActivity. 
+ */ + public static DeleteActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DeleteActivity deserializedDeleteActivity = new DeleteActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedDeleteActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedDeleteActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedDeleteActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedDeleteActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedDeleteActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedDeleteActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedDeleteActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedDeleteActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDeleteActivity.innerTypeProperties = DeleteActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDeleteActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedDeleteActivity.withAdditionalProperties(additionalProperties); + + return deserializedDeleteActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteDataFlowDebugSessionRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteDataFlowDebugSessionRequest.java index 7fb9457c7770..129431193a97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteDataFlowDebugSessionRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DeleteDataFlowDebugSessionRequest.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Request body structure for deleting data flow debug session. */ @Fluent -public final class DeleteDataFlowDebugSessionRequest { +public final class DeleteDataFlowDebugSessionRequest implements JsonSerializable { /* * The ID of data flow debug session. */ - @JsonProperty(value = "sessionId") private String sessionId; /** @@ -51,4 +54,41 @@ public DeleteDataFlowDebugSessionRequest withSessionId(String sessionId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("sessionId", this.sessionId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DeleteDataFlowDebugSessionRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DeleteDataFlowDebugSessionRequest if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DeleteDataFlowDebugSessionRequest. + */ + public static DeleteDataFlowDebugSessionRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DeleteDataFlowDebugSessionRequest deserializedDeleteDataFlowDebugSessionRequest + = new DeleteDataFlowDebugSessionRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sessionId".equals(fieldName)) { + deserializedDeleteDataFlowDebugSessionRequest.sessionId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDeleteDataFlowDebugSessionRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java index eb9da833cb75..dcb3992703cb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DelimitedTextDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Delimited text dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextDataset.class, visible = true) -@JsonTypeName("DelimitedText") @Fluent public final class DelimitedTextDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DelimitedText"; /* * Delimited text dataset properties. */ - @JsonProperty(value = "typeProperties") private DelimitedTextDatasetTypeProperties innerTypeProperties; /** @@ -375,4 +371,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DelimitedTextDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DelimitedTextDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DelimitedTextDataset. + */ + public static DelimitedTextDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextDataset deserializedDelimitedTextDataset = new DelimitedTextDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDelimitedTextDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDelimitedTextDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDelimitedTextDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDelimitedTextDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDelimitedTextDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDelimitedTextDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDelimitedTextDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedDelimitedTextDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedDelimitedTextDataset.innerTypeProperties + = DelimitedTextDatasetTypeProperties.fromJson(reader); + } else { + if 
(additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDelimitedTextDataset.withAdditionalProperties(additionalProperties); + + return deserializedDelimitedTextDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java index f8ed2d8a2645..6bcb061c6f4f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextReadSettings.java @@ -5,40 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Delimited text read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DelimitedTextReadSettings.class, - visible = true) -@JsonTypeName("DelimitedTextReadSettings") @Fluent public final class DelimitedTextReadSettings extends FormatReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DelimitedTextReadSettings"; /* * Indicates the number of non-empty rows to skip when reading data from input files. 
Type: integer (or Expression * with resultType integer). */ - @JsonProperty(value = "skipLineCount") private Object skipLineCount; /* * Compression settings. */ - @JsonProperty(value = "compressionProperties") private CompressionReadSettings compressionProperties; /** @@ -111,4 +103,58 @@ public void validate() { compressionProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("skipLineCount", this.skipLineCount); + jsonWriter.writeJsonField("compressionProperties", this.compressionProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DelimitedTextReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DelimitedTextReadSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DelimitedTextReadSettings. 
+ */ + public static DelimitedTextReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextReadSettings deserializedDelimitedTextReadSettings = new DelimitedTextReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDelimitedTextReadSettings.type = reader.getString(); + } else if ("skipLineCount".equals(fieldName)) { + deserializedDelimitedTextReadSettings.skipLineCount = reader.readUntyped(); + } else if ("compressionProperties".equals(fieldName)) { + deserializedDelimitedTextReadSettings.compressionProperties + = CompressionReadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDelimitedTextReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedDelimitedTextReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java index 73d6c1d6a3b6..d3662c9dfdfe 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity DelimitedText sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextSink.class, visible = true) -@JsonTypeName("DelimitedTextSink") @Fluent public final class DelimitedTextSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DelimitedTextSink"; /* * DelimitedText store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /* * DelimitedText format settings. */ - @JsonProperty(value = "formatSettings") private DelimitedTextWriteSettings formatSettings; /** @@ -161,4 +157,75 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); 
+ } + + /** + * Reads an instance of DelimitedTextSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DelimitedTextSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DelimitedTextSink. + */ + public static DelimitedTextSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextSink deserializedDelimitedTextSink = new DelimitedTextSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedDelimitedTextSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedDelimitedTextSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedDelimitedTextSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedDelimitedTextSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDelimitedTextSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDelimitedTextSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDelimitedTextSink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedDelimitedTextSink.storeSettings = StoreWriteSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedDelimitedTextSink.formatSettings = DelimitedTextWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDelimitedTextSink.withAdditionalProperties(additionalProperties); + + return deserializedDelimitedTextSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java index 78ac7c05ae07..41c8fbb211f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity DelimitedText source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DelimitedTextSource.class, visible = true) -@JsonTypeName("DelimitedTextSource") @Fluent public final class DelimitedTextSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DelimitedTextSource"; /* * DelimitedText store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * DelimitedText format settings. 
*/ - @JsonProperty(value = "formatSettings") private DelimitedTextReadSettings formatSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -172,4 +167,72 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DelimitedTextSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DelimitedTextSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DelimitedTextSource. 
+ */ + public static DelimitedTextSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextSource deserializedDelimitedTextSource = new DelimitedTextSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDelimitedTextSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDelimitedTextSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDelimitedTextSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDelimitedTextSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDelimitedTextSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedDelimitedTextSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedDelimitedTextSource.formatSettings = DelimitedTextReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDelimitedTextSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDelimitedTextSource.withAdditionalProperties(additionalProperties); + + return deserializedDelimitedTextSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java index f84955ecbf65..9921880498f7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DelimitedTextWriteSettings.java @@ -6,54 +6,44 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Delimited text write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DelimitedTextWriteSettings.class, - visible = true) -@JsonTypeName("DelimitedTextWriteSettings") @Fluent public final class DelimitedTextWriteSettings extends FormatWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DelimitedTextWriteSettings"; /* * Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "quoteAllText") private Object quoteAllText; /* * The file extension used to create the files. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileExtension", required = true) private Object fileExtension; /* * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or * Expression with resultType integer). 
*/ - @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* * Specifies the file name pattern _. when copy from non-file based store * without partitionOptions. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; /** @@ -178,4 +168,64 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DelimitedTextWriteSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("fileExtension", this.fileExtension); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("quoteAllText", this.quoteAllText); + jsonWriter.writeUntypedField("maxRowsPerFile", this.maxRowsPerFile); + jsonWriter.writeUntypedField("fileNamePrefix", this.fileNamePrefix); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DelimitedTextWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DelimitedTextWriteSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DelimitedTextWriteSettings. 
+ */ + public static DelimitedTextWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DelimitedTextWriteSettings deserializedDelimitedTextWriteSettings = new DelimitedTextWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("fileExtension".equals(fieldName)) { + deserializedDelimitedTextWriteSettings.fileExtension = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedDelimitedTextWriteSettings.type = reader.getString(); + } else if ("quoteAllText".equals(fieldName)) { + deserializedDelimitedTextWriteSettings.quoteAllText = reader.readUntyped(); + } else if ("maxRowsPerFile".equals(fieldName)) { + deserializedDelimitedTextWriteSettings.maxRowsPerFile = reader.readUntyped(); + } else if ("fileNamePrefix".equals(fieldName)) { + deserializedDelimitedTextWriteSettings.fileNamePrefix = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDelimitedTextWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedDelimitedTextWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyCondition.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyCondition.java index d49fde453ae5..78ecbdf2fb17 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyCondition.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyCondition.java @@ -5,7 +5,6 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public DependencyCondition() { * @param name a name to look for. * @return the corresponding DependencyCondition. */ - @JsonCreator public static DependencyCondition fromString(String name) { return fromString(name, DependencyCondition.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java index bd5167ce4ddd..a5b789c6232a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DependencyReference.java @@ -5,29 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Referenced dependency. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DependencyReference.class, visible = true) -@JsonTypeName("DependencyReference") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "TriggerDependencyReference", value = TriggerDependencyReference.class), - @JsonSubTypes.Type( - name = "SelfDependencyTumblingWindowTriggerReference", - value = SelfDependencyTumblingWindowTriggerReference.class) }) @Immutable -public class DependencyReference { +public class DependencyReference implements JsonSerializable { /* * The type of dependency reference. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DependencyReference"; /** @@ -52,4 +43,69 @@ public String type() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DependencyReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DependencyReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DependencyReference. + */ + public static DependencyReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("TriggerDependencyReference".equals(discriminatorValue)) { + return TriggerDependencyReference.fromJsonKnownDiscriminator(readerToUse.reset()); + } else if ("TumblingWindowTriggerDependencyReference".equals(discriminatorValue)) { + return TumblingWindowTriggerDependencyReference.fromJson(readerToUse.reset()); + } else if ("SelfDependencyTumblingWindowTriggerReference".equals(discriminatorValue)) { + return SelfDependencyTumblingWindowTriggerReference.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static DependencyReference fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DependencyReference deserializedDependencyReference = new DependencyReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedDependencyReference.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedDependencyReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java index ee5db667f743..b5acb9d0e2eb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DistcpSettings.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * Distcp settings. */ @Fluent -public final class DistcpSettings { +public final class DistcpSettings implements JsonSerializable { /* * Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "resourceManagerEndpoint", required = true) private Object resourceManagerEndpoint; /* @@ -24,13 +27,11 @@ public final class DistcpSettings { * generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "tempScriptPath", required = true) private Object tempScriptPath; /* * Specifies the Distcp options. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "distcpOptions") private Object distcpOptions; /** @@ -125,4 +126,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DistcpSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("resourceManagerEndpoint", this.resourceManagerEndpoint); + jsonWriter.writeUntypedField("tempScriptPath", this.tempScriptPath); + jsonWriter.writeUntypedField("distcpOptions", this.distcpOptions); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DistcpSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DistcpSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DistcpSettings. 
+ */ + public static DistcpSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DistcpSettings deserializedDistcpSettings = new DistcpSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("resourceManagerEndpoint".equals(fieldName)) { + deserializedDistcpSettings.resourceManagerEndpoint = reader.readUntyped(); + } else if ("tempScriptPath".equals(fieldName)) { + deserializedDistcpSettings.tempScriptPath = reader.readUntyped(); + } else if ("distcpOptions".equals(fieldName)) { + deserializedDistcpSettings.distcpOptions = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedDistcpSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java index df2eec8aae32..40b6c6020152 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DocumentDbCollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Azure Document Database Collection dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DocumentDbCollectionDataset.class, - visible = true) -@JsonTypeName("DocumentDbCollection") @Fluent public final class DocumentDbCollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DocumentDbCollection"; /* * DocumentDB Collection dataset properties. */ - @JsonProperty(value = "typeProperties", required = true) private DocumentDbCollectionDatasetTypeProperties innerTypeProperties = new DocumentDbCollectionDatasetTypeProperties(); @@ -170,4 +162,81 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DocumentDbCollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DocumentDbCollectionDataset from the 
JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DocumentDbCollectionDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DocumentDbCollectionDataset. + */ + public static DocumentDbCollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DocumentDbCollectionDataset deserializedDocumentDbCollectionDataset = new DocumentDbCollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDocumentDbCollectionDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDocumentDbCollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDocumentDbCollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.innerTypeProperties + = 
DocumentDbCollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDocumentDbCollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDocumentDbCollectionDataset.withAdditionalProperties(additionalProperties); + + return deserializedDocumentDbCollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java index c78b0f5f59b7..554daa03b631 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSink.java @@ -5,40 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Document Database Collection sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DocumentDbCollectionSink.class, - visible = true) -@JsonTypeName("DocumentDbCollectionSink") @Fluent public final class DocumentDbCollectionSink extends CopySink { /* * Copy sink type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DocumentDbCollectionSink"; /* * Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "nestingSeparator") private Object nestingSeparator; /* * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed * values: insert and upsert. */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -164,4 +156,75 @@ public DocumentDbCollectionSink withDisableMetricsCollection(Object disableMetri public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("nestingSeparator", this.nestingSeparator); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DocumentDbCollectionSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DocumentDbCollectionSink if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DocumentDbCollectionSink. + */ + public static DocumentDbCollectionSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DocumentDbCollectionSink deserializedDocumentDbCollectionSink = new DocumentDbCollectionSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDocumentDbCollectionSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDocumentDbCollectionSink.type = reader.getString(); + } else if ("nestingSeparator".equals(fieldName)) { + deserializedDocumentDbCollectionSink.nestingSeparator = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedDocumentDbCollectionSink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } 
+ } + deserializedDocumentDbCollectionSink.withAdditionalProperties(additionalProperties); + + return deserializedDocumentDbCollectionSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java index f0202ef4be58..180473aa352d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DocumentDbCollectionSource.java @@ -5,53 +5,43 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Document Database Collection source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DocumentDbCollectionSource.class, - visible = true) -@JsonTypeName("DocumentDbCollectionSource") @Fluent public final class DocumentDbCollectionSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DocumentDbCollectionSource"; /* * Documents query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Nested properties separator. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "nestingSeparator") private Object nestingSeparator; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -201,4 +191,75 @@ public DocumentDbCollectionSource withDisableMetricsCollection(Object disableMet public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("nestingSeparator", this.nestingSeparator); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DocumentDbCollectionSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of DocumentDbCollectionSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the DocumentDbCollectionSource. + */ + public static DocumentDbCollectionSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DocumentDbCollectionSource deserializedDocumentDbCollectionSource = new DocumentDbCollectionSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDocumentDbCollectionSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDocumentDbCollectionSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDocumentDbCollectionSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDocumentDbCollectionSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDocumentDbCollectionSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDocumentDbCollectionSource.query = reader.readUntyped(); + } else if ("nestingSeparator".equals(fieldName)) { + deserializedDocumentDbCollectionSource.nestingSeparator = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedDocumentDbCollectionSource.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDocumentDbCollectionSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedDocumentDbCollectionSource.withAdditionalProperties(additionalProperties); + + return deserializedDocumentDbCollectionSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java index e235ebcce399..e7f32fb1bcc8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DrillLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Drill server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillLinkedService.class, visible = true) -@JsonTypeName("Drill") @Fluent public final class DrillLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Drill"; /* * Drill server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DrillLinkedServiceTypeProperties innerTypeProperties = new DrillLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DrillLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DrillLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DrillLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DrillLinkedService. 
+ */ + public static DrillLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DrillLinkedService deserializedDrillLinkedService = new DrillLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDrillLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDrillLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDrillLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDrillLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDrillLinkedService.innerTypeProperties + = DrillLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDrillLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDrillLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDrillLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java index c26935daed63..398ab23d2a7c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Drill server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillSource.class, visible = true) -@JsonTypeName("DrillSource") @Fluent public final class DrillSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DrillSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public DrillSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DrillSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DrillSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DrillSource. 
+ */ + public static DrillSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DrillSource deserializedDrillSource = new DrillSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDrillSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDrillSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDrillSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDrillSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedDrillSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDrillSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDrillSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDrillSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDrillSource.withAdditionalProperties(additionalProperties); + + return deserializedDrillSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java index a779f73af710..e31614344402 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DrillTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DrillDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Drill server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DrillTableDataset.class, visible = true) -@JsonTypeName("DrillTable") @Fluent public final class DrillTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DrillTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private DrillDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DrillTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DrillTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DrillTableDataset. 
+ */ + public static DrillTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DrillTableDataset deserializedDrillTableDataset = new DrillTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDrillTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDrillTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDrillTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDrillTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDrillTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDrillTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDrillTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedDrillTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedDrillTableDataset.innerTypeProperties = DrillDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDrillTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedDrillTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java index b71b66e3ae7c..5483fdb58e60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Dynamics AX linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DynamicsAXLinkedService.class, - visible = true) -@JsonTypeName("DynamicsAX") @Fluent public final class DynamicsAXLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsAX"; /* * Dynamics AX linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DynamicsAXLinkedServiceTypeProperties innerTypeProperties = new DynamicsAXLinkedServiceTypeProperties(); /** @@ -269,4 +261,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsAXLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsAXLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsAXLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsAXLinkedService. 
+ */ + public static DynamicsAXLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsAXLinkedService deserializedDynamicsAXLinkedService = new DynamicsAXLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDynamicsAXLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsAXLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsAXLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsAXLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsAXLinkedService.innerTypeProperties + = DynamicsAXLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDynamicsAXLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsAXLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsAXLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java index 6b6d440fe4e2..33cbddc935bc 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXResourceDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The path of the Dynamics AX OData entity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DynamicsAXResourceDataset.class, - visible = true) -@JsonTypeName("DynamicsAXResource") @Fluent public final class DynamicsAXResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsAXResource"; /* * Dynamics AX OData resource dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DynamicsAXResourceDatasetTypeProperties innerTypeProperties = new DynamicsAXResourceDatasetTypeProperties(); /** @@ -169,4 +161,81 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsAXResourceDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsAXResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsAXResourceDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsAXResourceDataset. 
+ */ + public static DynamicsAXResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsAXResourceDataset deserializedDynamicsAXResourceDataset = new DynamicsAXResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDynamicsAXResourceDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsAXResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsAXResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.innerTypeProperties + = DynamicsAXResourceDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDynamicsAXResourceDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsAXResourceDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedDynamicsAXResourceDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java index cdc79c11d4ae..270ecd51945b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsAXSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Dynamics AX source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsAXSource.class, visible = true) -@JsonTypeName("DynamicsAXSource") @Fluent public final class DynamicsAXSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsAXSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* @@ -35,7 +32,6 @@ public final class DynamicsAXSource extends TabularSource { * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -163,4 +159,75 @@ public DynamicsAXSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsAXSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsAXSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DynamicsAXSource. 
+ */ + public static DynamicsAXSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsAXSource deserializedDynamicsAXSource = new DynamicsAXSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDynamicsAXSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDynamicsAXSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDynamicsAXSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDynamicsAXSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedDynamicsAXSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDynamicsAXSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDynamicsAXSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDynamicsAXSource.query = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedDynamicsAXSource.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsAXSource.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsAXSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java index 529cde23fc77..08412809bc9f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmEntityDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmEntityDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Dynamics CRM entity dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DynamicsCrmEntityDataset.class, - visible = true) -@JsonTypeName("DynamicsCrmEntity") @Fluent public final class DynamicsCrmEntityDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsCrmEntity"; /* * Dynamics CRM entity dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private DynamicsCrmEntityDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmEntityDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmEntityDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsCrmEntityDataset. 
+ */ + public static DynamicsCrmEntityDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmEntityDataset deserializedDynamicsCrmEntityDataset = new DynamicsCrmEntityDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsCrmEntityDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsCrmEntityDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsCrmEntityDataset.innerTypeProperties + = DynamicsCrmEntityDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsCrmEntityDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedDynamicsCrmEntityDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java index d00af8f8ebe6..62ec3f440254 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Dynamics CRM linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = DynamicsCrmLinkedService.class, - visible = true) -@JsonTypeName("DynamicsCrm") @Fluent public final class DynamicsCrmLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsCrm"; /* * Dynamics CRM linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DynamicsCrmLinkedServiceTypeProperties innerTypeProperties = new DynamicsCrmLinkedServiceTypeProperties(); /** @@ -231,7 +223,8 @@ public DynamicsCrmLinkedService withOrganizationName(Object organizationName) { /** * Get the authenticationType property: The authentication type to connect to Dynamics CRM server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @return the authenticationType value. */ @@ -242,7 +235,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Dynamics CRM server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @param authenticationType the authenticationType value to set. * @return the DynamicsCrmLinkedService object itself. @@ -255,6 +249,31 @@ public DynamicsCrmLinkedService withAuthenticationType(Object authenticationType return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @return the domain value. + */ + public Object domain() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().domain(); + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. 
Type: string (or + * Expression with resultType string). + * + * @param domain the domain value to set. + * @return the DynamicsCrmLinkedService object itself. + */ + public DynamicsCrmLinkedService withDomain(Object domain) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new DynamicsCrmLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withDomain(domain); + return this; + } + /** * Get the username property: User name to access the Dynamics CRM instance. Type: string (or Expression with * resultType string). @@ -450,4 +469,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsCrmLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the DynamicsCrmLinkedService. + */ + public static DynamicsCrmLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmLinkedService deserializedDynamicsCrmLinkedService = new DynamicsCrmLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDynamicsCrmLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsCrmLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsCrmLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsCrmLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsCrmLinkedService.innerTypeProperties + = DynamicsCrmLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDynamicsCrmLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsCrmLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsCrmLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java index fafa6ee68e43..66254998d062 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSink.java @@ -6,43 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Dynamics CRM sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsCrmSink.class, visible = true) -@JsonTypeName("DynamicsCrmSink") @Fluent public final class DynamicsCrmSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsCrmSink"; /* * The write behavior for the operation. */ - @JsonProperty(value = "writeBehavior", required = true) private DynamicsSinkWriteBehavior writeBehavior; /* * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. * Default is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; /** @@ -194,4 +189,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsCrmSink.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + jsonWriter.writeUntypedField("alternateKeyName", this.alternateKeyName); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsCrmSink. 
+ */ + public static DynamicsCrmSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmSink deserializedDynamicsCrmSink = new DynamicsCrmSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedDynamicsCrmSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedDynamicsCrmSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedDynamicsCrmSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedDynamicsCrmSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDynamicsCrmSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDynamicsCrmSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("writeBehavior".equals(fieldName)) { + deserializedDynamicsCrmSink.writeBehavior + = DynamicsSinkWriteBehavior.fromString(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedDynamicsCrmSink.type = reader.getString(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedDynamicsCrmSink.ignoreNullValues = reader.readUntyped(); + } else if ("alternateKeyName".equals(fieldName)) { + deserializedDynamicsCrmSink.alternateKeyName = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsCrmSink.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsCrmSink; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java index bb4e50ad9931..707a11dcb6cf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsCrmSource.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Dynamics CRM source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsCrmSource.class, visible = true) -@JsonTypeName("DynamicsCrmSource") @Fluent public final class DynamicsCrmSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsCrmSource"; /* * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public DynamicsCrmSource withDisableMetricsCollection(Object disableMetricsColle public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsCrmSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsCrmSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DynamicsCrmSource. 
+ */ + public static DynamicsCrmSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsCrmSource deserializedDynamicsCrmSource = new DynamicsCrmSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDynamicsCrmSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDynamicsCrmSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDynamicsCrmSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDynamicsCrmSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDynamicsCrmSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDynamicsCrmSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDynamicsCrmSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsCrmSource.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsCrmSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java index b538774f1c17..89ac1dac2841 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsEntityDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsEntityDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Dynamics entity dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsEntityDataset.class, visible = true) -@JsonTypeName("DynamicsEntity") @Fluent public final class DynamicsEntityDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsEntity"; /* * Dynamics entity dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private DynamicsEntityDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsEntityDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsEntityDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsEntityDataset. 
+ */ + public static DynamicsEntityDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsEntityDataset deserializedDynamicsEntityDataset = new DynamicsEntityDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedDynamicsEntityDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsEntityDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedDynamicsEntityDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedDynamicsEntityDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsEntityDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsEntityDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedDynamicsEntityDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedDynamicsEntityDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsEntityDataset.innerTypeProperties + = DynamicsEntityDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsEntityDataset.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsEntityDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java index 90ddde44eabf..5242eab83a6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DynamicsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Dynamics linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsLinkedService.class, visible = true) -@JsonTypeName("Dynamics") @Fluent public final class DynamicsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Dynamics"; /* * Dynamics linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private DynamicsLinkedServiceTypeProperties innerTypeProperties = new DynamicsLinkedServiceTypeProperties(); /** @@ -225,7 +221,8 @@ public DynamicsLinkedService withOrganizationName(Object organizationName) { /** * Get the authenticationType property: The authentication type to connect to Dynamics server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @return the authenticationType value. */ @@ -236,7 +233,8 @@ public Object authenticationType() { /** * Set the authenticationType property: The authentication type to connect to Dynamics server. 'Office365' for * online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server - * authentication in online scenario. Type: string (or Expression with resultType string). + * authentication in online scenario, 'Active Directory' for Dynamics on-premises with IFD. Type: string (or + * Expression with resultType string). * * @param authenticationType the authenticationType value to set. * @return the DynamicsLinkedService object itself. @@ -249,6 +247,31 @@ public DynamicsLinkedService withAuthenticationType(Object authenticationType) { return this; } + /** + * Get the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). + * + * @return the domain value. + */ + public Object domain() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().domain(); + } + + /** + * Set the domain property: The Active Directory domain that will verify user credentials. Type: string (or + * Expression with resultType string). 
+ * + * @param domain the domain value to set. + * @return the DynamicsLinkedService object itself. + */ + public DynamicsLinkedService withDomain(Object domain) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new DynamicsLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withDomain(domain); + return this; + } + /** * Get the username property: User name to access the Dynamics instance. Type: string (or Expression with resultType * string). @@ -444,4 +467,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsLinkedService. 
+ */ + public static DynamicsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsLinkedService deserializedDynamicsLinkedService = new DynamicsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedDynamicsLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedDynamicsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedDynamicsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedDynamicsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedDynamicsLinkedService.innerTypeProperties + = DynamicsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedDynamicsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java index b8704fce4f08..99a501e73537 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSink.java @@ -6,43 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Dynamics sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsSink.class, visible = true) -@JsonTypeName("DynamicsSink") @Fluent public final class DynamicsSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsSink"; /* * The write behavior for the operation. */ - @JsonProperty(value = "writeBehavior", required = true) private DynamicsSinkWriteBehavior writeBehavior; /* * The flag indicating whether ignore null values from input dataset (except key fields) during write operation. * Default is false. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /* * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "alternateKeyName") private Object alternateKeyName; /** @@ -194,4 +189,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(DynamicsSink.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + jsonWriter.writeUntypedField("alternateKeyName", this.alternateKeyName); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the DynamicsSink. 
+ */ + public static DynamicsSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsSink deserializedDynamicsSink = new DynamicsSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedDynamicsSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedDynamicsSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedDynamicsSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedDynamicsSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDynamicsSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDynamicsSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("writeBehavior".equals(fieldName)) { + deserializedDynamicsSink.writeBehavior = DynamicsSinkWriteBehavior.fromString(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedDynamicsSink.type = reader.getString(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedDynamicsSink.ignoreNullValues = reader.readUntyped(); + } else if ("alternateKeyName".equals(fieldName)) { + deserializedDynamicsSink.alternateKeyName = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsSink.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsSink; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSinkWriteBehavior.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSinkWriteBehavior.java index 3a1358f8ea01..37a3a5b6dc62 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSinkWriteBehavior.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSinkWriteBehavior.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public DynamicsSinkWriteBehavior() { * @param name a name to look for. * @return the corresponding DynamicsSinkWriteBehavior. */ - @JsonCreator public static DynamicsSinkWriteBehavior fromString(String name) { return fromString(name, DynamicsSinkWriteBehavior.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java index abf50cdb778b..e6fbac61d9a0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/DynamicsSource.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Dynamics source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DynamicsSource.class, visible = true) -@JsonTypeName("DynamicsSource") @Fluent public final class DynamicsSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "DynamicsSource"; /* * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public DynamicsSource withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of DynamicsSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of DynamicsSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the DynamicsSource. + */ + public static DynamicsSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + DynamicsSource deserializedDynamicsSource = new DynamicsSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedDynamicsSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedDynamicsSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedDynamicsSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedDynamicsSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedDynamicsSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedDynamicsSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedDynamicsSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedDynamicsSource.withAdditionalProperties(additionalProperties); + + return deserializedDynamicsSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java index 58136fb9e0a5..6c6356cf5b6d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.EloquaLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Eloqua server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaLinkedService.class, visible = true) -@JsonTypeName("Eloqua") @Fluent public final class EloquaLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Eloqua"; /* * Eloqua server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private EloquaLinkedServiceTypeProperties innerTypeProperties = new EloquaLinkedServiceTypeProperties(); /** @@ -284,4 +280,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(EloquaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EloquaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EloquaLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EloquaLinkedService. 
+ */ + public static EloquaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EloquaLinkedService deserializedEloquaLinkedService = new EloquaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedEloquaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedEloquaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedEloquaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedEloquaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedEloquaLinkedService.innerTypeProperties + = EloquaLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedEloquaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedEloquaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedEloquaLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java index 58054d6fb252..41561fd932e5 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Eloqua server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaObjectDataset.class, visible = true) -@JsonTypeName("EloquaObject") @Fluent public final class EloquaObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "EloquaObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EloquaObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EloquaObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EloquaObjectDataset. 
+ */ + public static EloquaObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EloquaObjectDataset deserializedEloquaObjectDataset = new EloquaObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedEloquaObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedEloquaObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedEloquaObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedEloquaObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedEloquaObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedEloquaObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedEloquaObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedEloquaObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedEloquaObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedEloquaObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedEloquaObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java index d911ff07ddec..f498eeb3b7d3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EloquaSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Eloqua server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = EloquaSource.class, visible = true) -@JsonTypeName("EloquaSource") @Fluent public final class EloquaSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "EloquaSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public EloquaSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EloquaSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EloquaSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the EloquaSource. 
+ */ + public static EloquaSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EloquaSource deserializedEloquaSource = new EloquaSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedEloquaSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedEloquaSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedEloquaSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedEloquaSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedEloquaSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedEloquaSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedEloquaSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedEloquaSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedEloquaSource.withAdditionalProperties(additionalProperties); + + return deserializedEloquaSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java index 4341f5b976b4..ea3443c2722c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EncryptionConfiguration.java @@ -6,36 +6,36 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Definition of CMK for the factory. */ @Fluent -public final class EncryptionConfiguration { +public final class EncryptionConfiguration implements JsonSerializable { /* * The name of the key in Azure Key Vault to use as Customer Managed Key. */ - @JsonProperty(value = "keyName", required = true) private String keyName; /* * The url of the Azure Key Vault used for CMK. */ - @JsonProperty(value = "vaultBaseUrl", required = true) private String vaultBaseUrl; /* * The version of the key used for CMK. If not provided, latest version will be used. */ - @JsonProperty(value = "keyVersion") private String keyVersion; /* * User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity * will be used. 
*/ - @JsonProperty(value = "identity") private CmkIdentityDefinition identity; /** @@ -148,4 +148,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(EncryptionConfiguration.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("keyName", this.keyName); + jsonWriter.writeStringField("vaultBaseUrl", this.vaultBaseUrl); + jsonWriter.writeStringField("keyVersion", this.keyVersion); + jsonWriter.writeJsonField("identity", this.identity); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EncryptionConfiguration from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EncryptionConfiguration if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EncryptionConfiguration. 
+ */ + public static EncryptionConfiguration fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EncryptionConfiguration deserializedEncryptionConfiguration = new EncryptionConfiguration(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("keyName".equals(fieldName)) { + deserializedEncryptionConfiguration.keyName = reader.getString(); + } else if ("vaultBaseUrl".equals(fieldName)) { + deserializedEncryptionConfiguration.vaultBaseUrl = reader.getString(); + } else if ("keyVersion".equals(fieldName)) { + deserializedEncryptionConfiguration.keyVersion = reader.getString(); + } else if ("identity".equals(fieldName)) { + deserializedEncryptionConfiguration.identity = CmkIdentityDefinition.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedEncryptionConfiguration; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EntityReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EntityReference.java index 6b50c94cab06..474db0c5a69c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EntityReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EntityReference.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The entity reference. 
*/ @Fluent -public final class EntityReference { +public final class EntityReference implements JsonSerializable { /* * The type of this referenced entity. */ - @JsonProperty(value = "type") private IntegrationRuntimeEntityReferenceType type; /* * The name of this referenced entity. */ - @JsonProperty(value = "referenceName") private String referenceName; /** @@ -77,4 +79,44 @@ public EntityReference withReferenceName(String referenceName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EntityReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EntityReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the EntityReference. 
+ */ + public static EntityReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EntityReference deserializedEntityReference = new EntityReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedEntityReference.type + = IntegrationRuntimeEntityReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedEntityReference.referenceName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedEntityReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java index c061df854c4f..82993539d24d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EnvironmentVariableSetup.java @@ -6,34 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.EnvironmentVariableSetupTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; /** * The custom setup of setting environment variable. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = EnvironmentVariableSetup.class, - visible = true) -@JsonTypeName("EnvironmentVariableSetup") @Fluent public final class EnvironmentVariableSetup extends CustomSetupBase { /* * The type of custom setup. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "EnvironmentVariableSetup"; /* * Add environment variable type properties. */ - @JsonProperty(value = "typeProperties", required = true) private EnvironmentVariableSetupTypeProperties innerTypeProperties = new EnvironmentVariableSetupTypeProperties(); /** @@ -125,4 +116,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(EnvironmentVariableSetup.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of EnvironmentVariableSetup from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of EnvironmentVariableSetup if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the EnvironmentVariableSetup. 
+ */ + public static EnvironmentVariableSetup fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + EnvironmentVariableSetup deserializedEnvironmentVariableSetup = new EnvironmentVariableSetup(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("typeProperties".equals(fieldName)) { + deserializedEnvironmentVariableSetup.innerTypeProperties + = EnvironmentVariableSetupTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedEnvironmentVariableSetup.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedEnvironmentVariableSetup; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EventSubscriptionStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EventSubscriptionStatus.java index 28171ba990ea..84ba45847999 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EventSubscriptionStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/EventSubscriptionStatus.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public EventSubscriptionStatus() { * @param name a name to look for. * @return the corresponding EventSubscriptionStatus. 
*/ - @JsonCreator public static EventSubscriptionStatus fromString(String name) { return fromString(name, EventSubscriptionStatus.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java index ee5eb4a35f98..d9ed8ae2707c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ExcelDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Excel dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExcelDataset.class, visible = true) -@JsonTypeName("Excel") @Fluent public final class ExcelDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Excel"; /* * Excel dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private ExcelDatasetTypeProperties innerTypeProperties; /** @@ -300,4 +296,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExcelDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExcelDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExcelDataset. 
+ */ + public static ExcelDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExcelDataset deserializedExcelDataset = new ExcelDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedExcelDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedExcelDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedExcelDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedExcelDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedExcelDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedExcelDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedExcelDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedExcelDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedExcelDataset.innerTypeProperties = ExcelDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExcelDataset.withAdditionalProperties(additionalProperties); + + return deserializedExcelDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java index f020e1becd78..58f1c38221aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExcelSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity excel source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExcelSource.class, visible = true) -@JsonTypeName("ExcelSource") @Fluent public final class ExcelSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExcelSource"; /* * Excel store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public void validate() { storeSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExcelSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExcelSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ExcelSource. 
+ */ + public static ExcelSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExcelSource deserializedExcelSource = new ExcelSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedExcelSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedExcelSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedExcelSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedExcelSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedExcelSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedExcelSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedExcelSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExcelSource.withAdditionalProperties(additionalProperties); + + return deserializedExcelSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java index d909e552cd91..d12da590a9f6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivity.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ExecuteDataFlowActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Execute data flow activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ExecuteDataFlowActivity.class, - visible = true) -@JsonTypeName("ExecuteDataFlow") @Fluent public final class ExecuteDataFlowActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExecuteDataFlow"; /* * Execute data flow activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ExecuteDataFlowActivityTypeProperties innerTypeProperties = new ExecuteDataFlowActivityTypeProperties(); /** @@ -367,4 +360,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecuteDataFlowActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteDataFlowActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecuteDataFlowActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecuteDataFlowActivity. 
+ */ + public static ExecuteDataFlowActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteDataFlowActivity deserializedExecuteDataFlowActivity = new ExecuteDataFlowActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedExecuteDataFlowActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedExecuteDataFlowActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedExecuteDataFlowActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedExecuteDataFlowActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedExecuteDataFlowActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedExecuteDataFlowActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedExecuteDataFlowActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedExecuteDataFlowActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedExecuteDataFlowActivity.innerTypeProperties + = ExecuteDataFlowActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedExecuteDataFlowActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExecuteDataFlowActivity.withAdditionalProperties(additionalProperties); + + return deserializedExecuteDataFlowActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java index bd9892fa202c..838bb923d4dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteDataFlowActivityTypePropertiesCompute.java @@ -5,25 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Compute properties for data flow activity. */ @Fluent -public final class ExecuteDataFlowActivityTypePropertiesCompute { +public final class ExecuteDataFlowActivityTypePropertiesCompute + implements JsonSerializable { /* * Compute type of the cluster which will execute data flow job. Possible values include: 'General', * 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "computeType") private Object computeType; /* * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. 
* Type: integer (or Expression with resultType integer) */ - @JsonProperty(value = "coreCount") private Object coreCount; /** @@ -83,4 +86,44 @@ public ExecuteDataFlowActivityTypePropertiesCompute withCoreCount(Object coreCou */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("computeType", this.computeType); + jsonWriter.writeUntypedField("coreCount", this.coreCount); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteDataFlowActivityTypePropertiesCompute from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecuteDataFlowActivityTypePropertiesCompute if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ExecuteDataFlowActivityTypePropertiesCompute. + */ + public static ExecuteDataFlowActivityTypePropertiesCompute fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteDataFlowActivityTypePropertiesCompute deserializedExecuteDataFlowActivityTypePropertiesCompute + = new ExecuteDataFlowActivityTypePropertiesCompute(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("computeType".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypePropertiesCompute.computeType = reader.readUntyped(); + } else if ("coreCount".equals(fieldName)) { + deserializedExecuteDataFlowActivityTypePropertiesCompute.coreCount = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedExecuteDataFlowActivityTypePropertiesCompute; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java index 4d93abec0534..5b87220b83f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivity.java @@ -6,42 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ExecutePipelineActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Execute pipeline activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ExecutePipelineActivity.class, - visible = true) -@JsonTypeName("ExecutePipeline") @Fluent public final class ExecutePipelineActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExecutePipeline"; /* * Execute pipeline activity policy. */ - @JsonProperty(value = "policy") private ExecutePipelineActivityPolicy policy; /* * Execute pipeline activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private ExecutePipelineActivityTypeProperties innerTypeProperties = new ExecutePipelineActivityTypeProperties(); /** @@ -235,4 +226,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecutePipelineActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("policy", this.policy); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecutePipelineActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecutePipelineActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecutePipelineActivity. 
+ */ + public static ExecutePipelineActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecutePipelineActivity deserializedExecutePipelineActivity = new ExecutePipelineActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedExecutePipelineActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedExecutePipelineActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedExecutePipelineActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedExecutePipelineActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedExecutePipelineActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedExecutePipelineActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedExecutePipelineActivity.innerTypeProperties + = ExecutePipelineActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedExecutePipelineActivity.type = reader.getString(); + } else if ("policy".equals(fieldName)) { + deserializedExecutePipelineActivity.policy = ExecutePipelineActivityPolicy.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedExecutePipelineActivity.withAdditionalProperties(additionalProperties); + + return deserializedExecutePipelineActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivityPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivityPolicy.java index 42ef8521e987..4daa298df2ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivityPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutePipelineActivityPolicy.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,17 +17,15 @@ * Execution policy for an execute pipeline activity. */ @Fluent -public final class ExecutePipelineActivityPolicy { +public final class ExecutePipelineActivityPolicy implements JsonSerializable { /* * When set to true, Input from activity is considered as secure and will not be logged to monitoring. */ - @JsonProperty(value = "secureInput") private Boolean secureInput; /* * Execution policy for an execute pipeline activity. */ - @JsonIgnore private Map additionalProperties; /** @@ -62,7 +61,6 @@ public ExecutePipelineActivityPolicy withSecureInput(Boolean secureInput) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -78,14 +76,6 @@ public ExecutePipelineActivityPolicy withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -93,4 +83,52 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeBooleanField("secureInput", this.secureInput); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecutePipelineActivityPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecutePipelineActivityPolicy if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ExecutePipelineActivityPolicy. 
+ */ + public static ExecutePipelineActivityPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecutePipelineActivityPolicy deserializedExecutePipelineActivityPolicy + = new ExecutePipelineActivityPolicy(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("secureInput".equals(fieldName)) { + deserializedExecutePipelineActivityPolicy.secureInput = reader.getNullable(JsonReader::getBoolean); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExecutePipelineActivityPolicy.additionalProperties = additionalProperties; + + return deserializedExecutePipelineActivityPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java index 0136489a2217..9232f7733475 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteSsisPackageActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ExecuteSsisPackageActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; 
-import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Execute SSIS package activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ExecuteSsisPackageActivity.class, - visible = true) -@JsonTypeName("ExecuteSSISPackage") @Fluent public final class ExecuteSsisPackageActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExecuteSSISPackage"; /* * Execute SSIS package activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ExecuteSsisPackageActivityTypeProperties innerTypeProperties = new ExecuteSsisPackageActivityTypeProperties(); @@ -438,4 +430,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecuteSsisPackageActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteSsisPackageActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecuteSsisPackageActivity if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecuteSsisPackageActivity. 
+ */ + public static ExecuteSsisPackageActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteSsisPackageActivity deserializedExecuteSsisPackageActivity = new ExecuteSsisPackageActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedExecuteSsisPackageActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedExecuteSsisPackageActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedExecuteSsisPackageActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedExecuteSsisPackageActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.innerTypeProperties + = ExecuteSsisPackageActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedExecuteSsisPackageActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExecuteSsisPackageActivity.withAdditionalProperties(additionalProperties); + + return deserializedExecuteSsisPackageActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java index d16cabe21c8e..ec2d911a3099 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecuteWranglingDataflowActivity.java @@ -6,42 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ExecutePowerQueryActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Execute power query activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ExecuteWranglingDataflowActivity.class, - visible = true) -@JsonTypeName("ExecuteWranglingDataflow") @Fluent public final class ExecuteWranglingDataflowActivity extends Activity { /* * Type of activity. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExecuteWranglingDataflow"; /* * Execute power query activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ExecutePowerQueryActivityTypeProperties innerTypeProperties = new ExecutePowerQueryActivityTypeProperties(); /* * Activity policy. */ - @JsonProperty(value = "policy") private ActivityPolicy policy; /** @@ -428,4 +419,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExecuteWranglingDataflowActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("policy", this.policy); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecuteWranglingDataflowActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecuteWranglingDataflowActivity if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecuteWranglingDataflowActivity. + */ + public static ExecuteWranglingDataflowActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecuteWranglingDataflowActivity deserializedExecuteWranglingDataflowActivity + = new ExecuteWranglingDataflowActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity + .withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedExecuteWranglingDataflowActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedExecuteWranglingDataflowActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity.innerTypeProperties + = ExecutePowerQueryActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedExecuteWranglingDataflowActivity.type = reader.getString(); + } else if ("policy".equals(fieldName)) { + 
deserializedExecuteWranglingDataflowActivity.policy = ActivityPolicy.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExecuteWranglingDataflowActivity.withAdditionalProperties(additionalProperties); + + return deserializedExecuteWranglingDataflowActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java index 82e839e1947f..3671ea39dce4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java @@ -5,64 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Base class for all execution activities. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExecutionActivity.class, visible = true) -@JsonTypeName("Execution") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Copy", value = CopyActivity.class), - @JsonSubTypes.Type(name = "HDInsightHive", value = HDInsightHiveActivity.class), - @JsonSubTypes.Type(name = "HDInsightPig", value = HDInsightPigActivity.class), - @JsonSubTypes.Type(name = "HDInsightMapReduce", value = HDInsightMapReduceActivity.class), - @JsonSubTypes.Type(name = "HDInsightStreaming", value = HDInsightStreamingActivity.class), - @JsonSubTypes.Type(name = "HDInsightSpark", value = HDInsightSparkActivity.class), - @JsonSubTypes.Type(name = "ExecuteSSISPackage", value = ExecuteSsisPackageActivity.class), - @JsonSubTypes.Type(name = "Custom", value = CustomActivity.class), - @JsonSubTypes.Type(name = "SqlServerStoredProcedure", value = SqlServerStoredProcedureActivity.class), - @JsonSubTypes.Type(name = "Delete", value = DeleteActivity.class), - @JsonSubTypes.Type(name = "AzureDataExplorerCommand", value = AzureDataExplorerCommandActivity.class), - @JsonSubTypes.Type(name = "Lookup", value = LookupActivity.class), - @JsonSubTypes.Type(name = "WebActivity", value = WebActivity.class), - @JsonSubTypes.Type(name = "GetMetadata", value = GetMetadataActivity.class), - @JsonSubTypes.Type(name = "AzureMLBatchExecution", value = AzureMLBatchExecutionActivity.class), - @JsonSubTypes.Type(name = "AzureMLUpdateResource", value = AzureMLUpdateResourceActivity.class), - @JsonSubTypes.Type(name = "AzureMLExecutePipeline", value = AzureMLExecutePipelineActivity.class), - @JsonSubTypes.Type(name = "DataLakeAnalyticsU-SQL", value = DataLakeAnalyticsUsqlActivity.class), - @JsonSubTypes.Type(name = "DatabricksNotebook", value = DatabricksNotebookActivity.class), - @JsonSubTypes.Type(name = "DatabricksSparkJar", value = DatabricksSparkJarActivity.class), - @JsonSubTypes.Type(name = "DatabricksSparkPython", value = 
DatabricksSparkPythonActivity.class), - @JsonSubTypes.Type(name = "AzureFunctionActivity", value = AzureFunctionActivity.class), - @JsonSubTypes.Type(name = "ExecuteDataFlow", value = ExecuteDataFlowActivity.class), - @JsonSubTypes.Type(name = "Script", value = ScriptActivity.class), - @JsonSubTypes.Type(name = "SynapseNotebook", value = SynapseNotebookActivity.class), - @JsonSubTypes.Type(name = "SparkJob", value = SynapseSparkJobDefinitionActivity.class) }) @Fluent public class ExecutionActivity extends Activity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Execution"; /* * Linked service reference. */ - @JsonProperty(value = "linkedServiceName") private LinkedServiceReference linkedServiceName; /* * Activity policy. */ - @JsonProperty(value = "policy") private ActivityPolicy policy; /** @@ -190,4 +158,156 @@ public void validate() { policy().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeJsonField("policy", this.policy); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExecutionActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExecutionActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExecutionActivity. + */ + public static ExecutionActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("Copy".equals(discriminatorValue)) { + return CopyActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightHive".equals(discriminatorValue)) { + return HDInsightHiveActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightPig".equals(discriminatorValue)) { + return HDInsightPigActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightMapReduce".equals(discriminatorValue)) { + return HDInsightMapReduceActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightStreaming".equals(discriminatorValue)) { + return HDInsightStreamingActivity.fromJson(readerToUse.reset()); + } else if ("HDInsightSpark".equals(discriminatorValue)) { + return HDInsightSparkActivity.fromJson(readerToUse.reset()); + } else if ("ExecuteSSISPackage".equals(discriminatorValue)) { + return ExecuteSsisPackageActivity.fromJson(readerToUse.reset()); + } else if ("Custom".equals(discriminatorValue)) { + return CustomActivity.fromJson(readerToUse.reset()); + } else if ("SqlServerStoredProcedure".equals(discriminatorValue)) { + return SqlServerStoredProcedureActivity.fromJson(readerToUse.reset()); + } else if ("Delete".equals(discriminatorValue)) { + return DeleteActivity.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorerCommand".equals(discriminatorValue)) { + return AzureDataExplorerCommandActivity.fromJson(readerToUse.reset()); + } else if ("Lookup".equals(discriminatorValue)) { + return LookupActivity.fromJson(readerToUse.reset()); + } else if ("WebActivity".equals(discriminatorValue)) { + return WebActivity.fromJson(readerToUse.reset()); + } else if ("GetMetadata".equals(discriminatorValue)) { + return GetMetadataActivity.fromJson(readerToUse.reset()); + } else if ("AzureMLBatchExecution".equals(discriminatorValue)) { + return AzureMLBatchExecutionActivity.fromJson(readerToUse.reset()); + } else if ("AzureMLUpdateResource".equals(discriminatorValue)) { + return AzureMLUpdateResourceActivity.fromJson(readerToUse.reset()); + } else if 
("AzureMLExecutePipeline".equals(discriminatorValue)) { + return AzureMLExecutePipelineActivity.fromJson(readerToUse.reset()); + } else if ("DataLakeAnalyticsU-SQL".equals(discriminatorValue)) { + return DataLakeAnalyticsUsqlActivity.fromJson(readerToUse.reset()); + } else if ("DatabricksNotebook".equals(discriminatorValue)) { + return DatabricksNotebookActivity.fromJson(readerToUse.reset()); + } else if ("DatabricksSparkJar".equals(discriminatorValue)) { + return DatabricksSparkJarActivity.fromJson(readerToUse.reset()); + } else if ("DatabricksSparkPython".equals(discriminatorValue)) { + return DatabricksSparkPythonActivity.fromJson(readerToUse.reset()); + } else if ("AzureFunctionActivity".equals(discriminatorValue)) { + return AzureFunctionActivity.fromJson(readerToUse.reset()); + } else if ("ExecuteDataFlow".equals(discriminatorValue)) { + return ExecuteDataFlowActivity.fromJson(readerToUse.reset()); + } else if ("Script".equals(discriminatorValue)) { + return ScriptActivity.fromJson(readerToUse.reset()); + } else if ("SynapseNotebook".equals(discriminatorValue)) { + return SynapseNotebookActivity.fromJson(readerToUse.reset()); + } else if ("SparkJob".equals(discriminatorValue)) { + return SynapseSparkJobDefinitionActivity.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static ExecutionActivity fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExecutionActivity deserializedExecutionActivity = new ExecutionActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedExecutionActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedExecutionActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + 
deserializedExecutionActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedExecutionActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedExecutionActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedExecutionActivity.withUserProperties(userProperties); + } else if ("type".equals(fieldName)) { + deserializedExecutionActivity.type = reader.getString(); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedExecutionActivity.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("policy".equals(fieldName)) { + deserializedExecutionActivity.policy = ActivityPolicy.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExecutionActivity.withAdditionalProperties(additionalProperties); + + return deserializedExecutionActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java index c062c9ed2bbc..ab979540292e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExportSettings.java @@ -5,40 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Export command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ExportSettings.class, visible = true) -@JsonTypeName("ExportSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "SnowflakeExportCopyCommand", value = SnowflakeExportCopyCommand.class), - @JsonSubTypes.Type( - name = "AzureDatabricksDeltaLakeExportCommand", - value = AzureDatabricksDeltaLakeExportCommand.class) }) @Fluent -public class ExportSettings { +public class ExportSettings implements JsonSerializable { /* * The export setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ExportSettings"; /* * Export command settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -61,7 +48,6 @@ public String type() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -77,14 +63,6 @@ public ExportSettings withAdditionalProperties(Map additionalPro return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -92,4 +70,78 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExportSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExportSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ExportSettings. + */ + public static ExportSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("SnowflakeExportCopyCommand".equals(discriminatorValue)) { + return SnowflakeExportCopyCommand.fromJson(readerToUse.reset()); + } else if ("AzureDatabricksDeltaLakeExportCommand".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeExportCommand.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static ExportSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExportSettings deserializedExportSettings = new ExportSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedExportSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedExportSettings.additionalProperties = additionalProperties; + + return deserializedExportSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java index 8e3ebedced8b..179b5e59ec8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlBatchRequest.java @@ -6,18 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A list of exposure control features. */ @Fluent -public final class ExposureControlBatchRequest { +public final class ExposureControlBatchRequest implements JsonSerializable { /* * List of exposure control features. */ - @JsonProperty(value = "exposureControlRequests", required = true) private List exposureControlRequests; /** @@ -63,4 +66,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ExposureControlBatchRequest.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("exposureControlRequests", this.exposureControlRequests, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExposureControlBatchRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExposureControlBatchRequest if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ExposureControlBatchRequest. 
+ */ + public static ExposureControlBatchRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExposureControlBatchRequest deserializedExposureControlBatchRequest = new ExposureControlBatchRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("exposureControlRequests".equals(fieldName)) { + List exposureControlRequests + = reader.readArray(reader1 -> ExposureControlRequest.fromJson(reader1)); + deserializedExposureControlBatchRequest.exposureControlRequests = exposureControlRequests; + } else { + reader.skipChildren(); + } + } + + return deserializedExposureControlBatchRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlRequest.java index 9ed7f54325b9..5573d25e87ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExposureControlRequest.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The exposure control request. */ @Fluent -public final class ExposureControlRequest { +public final class ExposureControlRequest implements JsonSerializable { /* * The feature name. */ - @JsonProperty(value = "featureName") private String featureName; /* * The feature type. 
*/ - @JsonProperty(value = "featureType") private String featureType; /** @@ -77,4 +79,43 @@ public ExposureControlRequest withFeatureType(String featureType) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("featureName", this.featureName); + jsonWriter.writeStringField("featureType", this.featureType); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExposureControlRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExposureControlRequest if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ExposureControlRequest. + */ + public static ExposureControlRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExposureControlRequest deserializedExposureControlRequest = new ExposureControlRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("featureName".equals(fieldName)) { + deserializedExposureControlRequest.featureName = reader.getString(); + } else if ("featureType".equals(fieldName)) { + deserializedExposureControlRequest.featureType = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedExposureControlRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java index 47c0c2f6ed72..79e1ed7ffd03 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Expression.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Data Factory expression definition. */ @Fluent -public final class Expression { +public final class Expression implements JsonSerializable { /* * Expression type. */ - @JsonProperty(value = "type", required = true) private String type = "Expression"; /* * Expression value. */ - @JsonProperty(value = "value", required = true) private String value; /** @@ -84,4 +86,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Expression.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Expression from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Expression if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Expression. 
+ */ + public static Expression fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Expression deserializedExpression = new Expression(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedExpression.value = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedExpression; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2.java index d3ccb70bbc9f..27ea5f49b4fb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2.java @@ -5,36 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Nested representation of a complex expression. */ @Fluent -public final class ExpressionV2 { +public final class ExpressionV2 implements JsonSerializable { /* * Type of expressions supported by the system. Type: string. */ - @JsonProperty(value = "type") private ExpressionV2Type type; /* * Value for Constant/Field Type: string. */ - @JsonProperty(value = "value") private String value; /* * Expression operator value Type: list of strings. */ - @JsonProperty(value = "operators") private List operators; /* * List of nested expressions. 
*/ - @JsonProperty(value = "operands") private List operands; /** @@ -133,4 +133,51 @@ public void validate() { operands().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("value", this.value); + jsonWriter.writeArrayField("operators", this.operators, (writer, element) -> writer.writeString(element)); + jsonWriter.writeArrayField("operands", this.operands, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ExpressionV2 from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ExpressionV2 if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ExpressionV2. 
+ */ + public static ExpressionV2 fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ExpressionV2 deserializedExpressionV2 = new ExpressionV2(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedExpressionV2.type = ExpressionV2Type.fromString(reader.getString()); + } else if ("value".equals(fieldName)) { + deserializedExpressionV2.value = reader.getString(); + } else if ("operators".equals(fieldName)) { + List operators = reader.readArray(reader1 -> reader1.getString()); + deserializedExpressionV2.operators = operators; + } else if ("operands".equals(fieldName)) { + List operands = reader.readArray(reader1 -> ExpressionV2.fromJson(reader1)); + deserializedExpressionV2.operands = operands; + } else { + reader.skipChildren(); + } + } + + return deserializedExpressionV2; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2Type.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2Type.java index fa49ee618bcf..d53e8846dec0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2Type.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExpressionV2Type.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public ExpressionV2Type() { * @param name a name to look for. * @return the corresponding ExpressionV2Type. 
*/ - @JsonCreator public static ExpressionV2Type fromString(String name) { return fromString(name, ExpressionV2Type.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java index bcf60bf66214..4e28b2610a00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryGitHubConfiguration.java @@ -5,45 +5,34 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Factory's GitHub repo information. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = FactoryGitHubConfiguration.class, - visible = true) -@JsonTypeName("FactoryGitHubConfiguration") @Fluent public final class FactoryGitHubConfiguration extends FactoryRepoConfiguration { /* * Type of repo configuration. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FactoryGitHubConfiguration"; /* * GitHub Enterprise host name. For example: `https://github.mydomain.com` */ - @JsonProperty(value = "hostName") private String hostname; /* * GitHub bring your own app client id. */ - @JsonProperty(value = "clientId") private String clientId; /* * GitHub bring your own app client secret information. 
*/ - @JsonProperty(value = "clientSecret") private GitHubClientSecret clientSecret; /** @@ -188,4 +177,69 @@ public void validate() { clientSecret().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("accountName", accountName()); + jsonWriter.writeStringField("repositoryName", repositoryName()); + jsonWriter.writeStringField("collaborationBranch", collaborationBranch()); + jsonWriter.writeStringField("rootFolder", rootFolder()); + jsonWriter.writeStringField("lastCommitId", lastCommitId()); + jsonWriter.writeBooleanField("disablePublish", disablePublish()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("hostName", this.hostname); + jsonWriter.writeStringField("clientId", this.clientId); + jsonWriter.writeJsonField("clientSecret", this.clientSecret); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryGitHubConfiguration from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryGitHubConfiguration if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryGitHubConfiguration. 
+ */ + public static FactoryGitHubConfiguration fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryGitHubConfiguration deserializedFactoryGitHubConfiguration = new FactoryGitHubConfiguration(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountName".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.withAccountName(reader.getString()); + } else if ("repositoryName".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.withRepositoryName(reader.getString()); + } else if ("collaborationBranch".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.withCollaborationBranch(reader.getString()); + } else if ("rootFolder".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.withRootFolder(reader.getString()); + } else if ("lastCommitId".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.withLastCommitId(reader.getString()); + } else if ("disablePublish".equals(fieldName)) { + deserializedFactoryGitHubConfiguration + .withDisablePublish(reader.getNullable(JsonReader::getBoolean)); + } else if ("type".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.type = reader.getString(); + } else if ("hostName".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.hostname = reader.getString(); + } else if ("clientId".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.clientId = reader.getString(); + } else if ("clientSecret".equals(fieldName)) { + deserializedFactoryGitHubConfiguration.clientSecret = GitHubClientSecret.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryGitHubConfiguration; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java index 669dea6c1f33..e6092c834d54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentity.java @@ -6,8 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; import java.util.UUID; @@ -15,30 +18,25 @@ * Identity properties of the factory resource. */ @Fluent -public final class FactoryIdentity { +public final class FactoryIdentity implements JsonSerializable { /* * The identity type. */ - @JsonProperty(value = "type", required = true) private FactoryIdentityType type; /* * The principal id of the identity. */ - @JsonProperty(value = "principalId", access = JsonProperty.Access.WRITE_ONLY) private UUID principalId; /* * The client tenant id of the identity. */ - @JsonProperty(value = "tenantId", access = JsonProperty.Access.WRITE_ONLY) private UUID tenantId; /* * List of user assigned identities for the factory. 
*/ - @JsonProperty(value = "userAssignedIdentities") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map userAssignedIdentities; /** @@ -118,4 +116,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FactoryIdentity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeMapField("userAssignedIdentities", this.userAssignedIdentities, + (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryIdentity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryIdentity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryIdentity. 
+ */ + public static FactoryIdentity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryIdentity deserializedFactoryIdentity = new FactoryIdentity(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedFactoryIdentity.type = FactoryIdentityType.fromString(reader.getString()); + } else if ("principalId".equals(fieldName)) { + deserializedFactoryIdentity.principalId + = reader.getNullable(nonNullReader -> UUID.fromString(nonNullReader.getString())); + } else if ("tenantId".equals(fieldName)) { + deserializedFactoryIdentity.tenantId + = reader.getNullable(nonNullReader -> UUID.fromString(nonNullReader.getString())); + } else if ("userAssignedIdentities".equals(fieldName)) { + Map userAssignedIdentities = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedFactoryIdentity.userAssignedIdentities = userAssignedIdentities; + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryIdentity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentityType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentityType.java index b0c688b2ea3a..3b79903d4c83 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentityType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryIdentityType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public FactoryIdentityType() { * @param name a name to look 
for. * @return the corresponding FactoryIdentityType. */ - @JsonCreator public static FactoryIdentityType fromString(String name) { return fromString(name, FactoryIdentityType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java index ef3f83299f6d..e56c1f3eca04 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FactoryInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of factory resources. */ @Fluent -public final class FactoryListResponse { +public final class FactoryListResponse implements JsonSerializable { /* * List of factories. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. 
*/ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FactoryListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryListResponse if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryListResponse. 
+ */ + public static FactoryListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryListResponse deserializedFactoryListResponse = new FactoryListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> FactoryInner.fromJson(reader1)); + deserializedFactoryListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedFactoryListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java index 376d45e7cbb3..98be03aff7f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoConfiguration.java @@ -6,67 +6,50 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Factory's git repo information. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = FactoryRepoConfiguration.class, - visible = true) -@JsonTypeName("FactoryRepoConfiguration") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "FactoryVSTSConfiguration", value = FactoryVstsConfiguration.class), - @JsonSubTypes.Type(name = "FactoryGitHubConfiguration", value = FactoryGitHubConfiguration.class) }) @Fluent -public class FactoryRepoConfiguration { +public class FactoryRepoConfiguration implements JsonSerializable { /* * Type of repo configuration. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FactoryRepoConfiguration"; /* * Account name. */ - @JsonProperty(value = "accountName", required = true) private String accountName; /* * Repository name. */ - @JsonProperty(value = "repositoryName", required = true) private String repositoryName; /* * Collaboration branch. */ - @JsonProperty(value = "collaborationBranch", required = true) private String collaborationBranch; /* * Root folder. */ - @JsonProperty(value = "rootFolder", required = true) private String rootFolder; /* * Last commit id. */ - @JsonProperty(value = "lastCommitId") private String lastCommitId; /* * Disable manual publish operation in ADF studio to favor automated publish. 
*/ - @JsonProperty(value = "disablePublish") private Boolean disablePublish; /** @@ -233,4 +216,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FactoryRepoConfiguration.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("accountName", this.accountName); + jsonWriter.writeStringField("repositoryName", this.repositoryName); + jsonWriter.writeStringField("collaborationBranch", this.collaborationBranch); + jsonWriter.writeStringField("rootFolder", this.rootFolder); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("lastCommitId", this.lastCommitId); + jsonWriter.writeBooleanField("disablePublish", this.disablePublish); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryRepoConfiguration from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryRepoConfiguration if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryRepoConfiguration. + */ + public static FactoryRepoConfiguration fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("FactoryVSTSConfiguration".equals(discriminatorValue)) { + return FactoryVstsConfiguration.fromJson(readerToUse.reset()); + } else if ("FactoryGitHubConfiguration".equals(discriminatorValue)) { + return FactoryGitHubConfiguration.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static FactoryRepoConfiguration fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryRepoConfiguration deserializedFactoryRepoConfiguration = new FactoryRepoConfiguration(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountName".equals(fieldName)) { + deserializedFactoryRepoConfiguration.accountName = reader.getString(); + } else if ("repositoryName".equals(fieldName)) { + deserializedFactoryRepoConfiguration.repositoryName = reader.getString(); + } else if ("collaborationBranch".equals(fieldName)) { + deserializedFactoryRepoConfiguration.collaborationBranch = reader.getString(); + } else if ("rootFolder".equals(fieldName)) { + deserializedFactoryRepoConfiguration.rootFolder = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedFactoryRepoConfiguration.type = reader.getString(); + } else if ("lastCommitId".equals(fieldName)) { + deserializedFactoryRepoConfiguration.lastCommitId = reader.getString(); + } else if ("disablePublish".equals(fieldName)) { + deserializedFactoryRepoConfiguration.disablePublish = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryRepoConfiguration; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoUpdate.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoUpdate.java index 
fa26afe72421..091aebb11714 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoUpdate.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryRepoUpdate.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Factory's git repo information. */ @Fluent -public final class FactoryRepoUpdate { +public final class FactoryRepoUpdate implements JsonSerializable { /* * The factory resource id. */ - @JsonProperty(value = "factoryResourceId") private String factoryResourceId; /* * Git repo information of the factory. */ - @JsonProperty(value = "repoConfiguration") private FactoryRepoConfiguration repoConfiguration; /** @@ -80,4 +82,43 @@ public void validate() { repoConfiguration().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("factoryResourceId", this.factoryResourceId); + jsonWriter.writeJsonField("repoConfiguration", this.repoConfiguration); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryRepoUpdate from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryRepoUpdate if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FactoryRepoUpdate. 
+ */ + public static FactoryRepoUpdate fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryRepoUpdate deserializedFactoryRepoUpdate = new FactoryRepoUpdate(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("factoryResourceId".equals(fieldName)) { + deserializedFactoryRepoUpdate.factoryResourceId = reader.getString(); + } else if ("repoConfiguration".equals(fieldName)) { + deserializedFactoryRepoUpdate.repoConfiguration = FactoryRepoConfiguration.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryRepoUpdate; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryUpdateParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryUpdateParameters.java index c30d36de9db7..d495ddc8d99a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryUpdateParameters.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryUpdateParameters.java @@ -5,33 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FactoryUpdateProperties; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.Map; /** * Parameters for updating a factory resource. 
*/ @Fluent -public final class FactoryUpdateParameters { +public final class FactoryUpdateParameters implements JsonSerializable { /* * The resource tags. */ - @JsonProperty(value = "tags") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map tags; /* * Managed service identity of the factory. */ - @JsonProperty(value = "identity") private FactoryIdentity identity; /* * Properties of update the factory. */ - @JsonProperty(value = "properties") private FactoryUpdateProperties innerProperties; /** @@ -125,4 +124,47 @@ public void validate() { innerProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeMapField("tags", this.tags, (writer, element) -> writer.writeString(element)); + jsonWriter.writeJsonField("identity", this.identity); + jsonWriter.writeJsonField("properties", this.innerProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryUpdateParameters from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryUpdateParameters if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the FactoryUpdateParameters. 
+ */ + public static FactoryUpdateParameters fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryUpdateParameters deserializedFactoryUpdateParameters = new FactoryUpdateParameters(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("tags".equals(fieldName)) { + Map tags = reader.readMap(reader1 -> reader1.getString()); + deserializedFactoryUpdateParameters.tags = tags; + } else if ("identity".equals(fieldName)) { + deserializedFactoryUpdateParameters.identity = FactoryIdentity.fromJson(reader); + } else if ("properties".equals(fieldName)) { + deserializedFactoryUpdateParameters.innerProperties = FactoryUpdateProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryUpdateParameters; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java index 44148236a548..8874e5ad29df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FactoryVstsConfiguration.java @@ -6,39 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Factory's VSTS repo 
information. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = FactoryVstsConfiguration.class, - visible = true) -@JsonTypeName("FactoryVSTSConfiguration") @Fluent public final class FactoryVstsConfiguration extends FactoryRepoConfiguration { /* * Type of repo configuration. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FactoryVSTSConfiguration"; /* * VSTS project name. */ - @JsonProperty(value = "projectName", required = true) private String projectName; /* * VSTS tenant id. */ - @JsonProperty(value = "tenantId") private String tenantId; /** @@ -167,4 +157,65 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FactoryVstsConfiguration.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("accountName", accountName()); + jsonWriter.writeStringField("repositoryName", repositoryName()); + jsonWriter.writeStringField("collaborationBranch", collaborationBranch()); + jsonWriter.writeStringField("rootFolder", rootFolder()); + jsonWriter.writeStringField("lastCommitId", lastCommitId()); + jsonWriter.writeBooleanField("disablePublish", disablePublish()); + jsonWriter.writeStringField("projectName", this.projectName); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("tenantId", this.tenantId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FactoryVstsConfiguration from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FactoryVstsConfiguration if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FactoryVstsConfiguration. 
+ */ + public static FactoryVstsConfiguration fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FactoryVstsConfiguration deserializedFactoryVstsConfiguration = new FactoryVstsConfiguration(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("accountName".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withAccountName(reader.getString()); + } else if ("repositoryName".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withRepositoryName(reader.getString()); + } else if ("collaborationBranch".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withCollaborationBranch(reader.getString()); + } else if ("rootFolder".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withRootFolder(reader.getString()); + } else if ("lastCommitId".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withLastCommitId(reader.getString()); + } else if ("disablePublish".equals(fieldName)) { + deserializedFactoryVstsConfiguration.withDisablePublish(reader.getNullable(JsonReader::getBoolean)); + } else if ("projectName".equals(fieldName)) { + deserializedFactoryVstsConfiguration.projectName = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedFactoryVstsConfiguration.type = reader.getString(); + } else if ("tenantId".equals(fieldName)) { + deserializedFactoryVstsConfiguration.tenantId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedFactoryVstsConfiguration; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java index 22bfd038a0c6..5790ae13e9fc 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FailActivity.java @@ -6,12 +6,14 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FailActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity will fail within its own scope and output a custom error message and error code. The error message and @@ -19,21 +21,16 @@ * activity scope can be the whole pipeline or a control activity (e.g. foreach, switch, until), if the fail activity is * contained in it. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FailActivity.class, visible = true) -@JsonTypeName("Fail") @Fluent public final class FailActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Fail"; /* * Fail activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private FailActivityTypeProperties innerTypeProperties = new FailActivityTypeProperties(); /** @@ -185,4 +182,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FailActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FailActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FailActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FailActivity. 
+ */ + public static FailActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FailActivity deserializedFailActivity = new FailActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedFailActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedFailActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedFailActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedFailActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedFailActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedFailActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedFailActivity.innerTypeProperties = FailActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedFailActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFailActivity.withAdditionalProperties(additionalProperties); + + return deserializedFailActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java index 51547fb2944a..d99c62916f8e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FileServerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * File system linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = FileServerLinkedService.class, - visible = true) -@JsonTypeName("FileServer") @Fluent public final class FileServerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileServer"; /* * File system linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private FileServerLinkedServiceTypeProperties innerTypeProperties = new FileServerLinkedServiceTypeProperties(); /** @@ -211,4 +203,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FileServerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileServerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileServerLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FileServerLinkedService. 
+ */ + public static FileServerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileServerLinkedService deserializedFileServerLinkedService = new FileServerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedFileServerLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedFileServerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedFileServerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedFileServerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedFileServerLinkedService.innerTypeProperties + = FileServerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedFileServerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileServerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedFileServerLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java index ae9953722d07..bed010ba9128 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerLocation.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of file server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileServerLocation.class, visible = true) -@JsonTypeName("FileServerLocation") @Fluent public final class FileServerLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileServerLocation"; /** @@ -67,4 +65,57 @@ public FileServerLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileServerLocation from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of FileServerLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FileServerLocation. + */ + public static FileServerLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileServerLocation deserializedFileServerLocation = new FileServerLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedFileServerLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedFileServerLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFileServerLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileServerLocation.withAdditionalProperties(additionalProperties); + + return deserializedFileServerLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java index 5a94df37bc1b..2a1a8966ec13 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerReadSettings.java @@ -5,87 +5,75 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * File server read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileServerReadSettings.class, visible = true) -@JsonTypeName("FileServerReadSettings") @Fluent public final class FileServerReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileServerReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * FileServer wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * FileServer wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "fileFilter") private Object fileFilter; /** @@ -351,4 +339,87 @@ public FileServerReadSettings withDisableMetricsCollection(Object disableMetrics public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", 
this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeUntypedField("fileFilter", this.fileFilter); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileServerReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileServerReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the FileServerReadSettings. + */ + public static FileServerReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileServerReadSettings deserializedFileServerReadSettings = new FileServerReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedFileServerReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedFileServerReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFileServerReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedFileServerReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedFileServerReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + 
deserializedFileServerReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedFileServerReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedFileServerReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedFileServerReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedFileServerReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedFileServerReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedFileServerReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("fileFilter".equals(fieldName)) { + deserializedFileServerReadSettings.fileFilter = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileServerReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedFileServerReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java index db64c1e4a0c2..95f93c276077 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileServerWriteSettings.java @@ -5,28 +5,22 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * File server write settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = FileServerWriteSettings.class, - visible = true) -@JsonTypeName("FileServerWriteSettings") @Fluent public final class FileServerWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileServerWriteSettings"; /** @@ -90,4 +84,64 @@ public FileServerWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileServerWriteSettings from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of FileServerWriteSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the FileServerWriteSettings. + */ + public static FileServerWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileServerWriteSettings deserializedFileServerWriteSettings = new FileServerWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedFileServerWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedFileServerWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedFileServerWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedFileServerWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedFileServerWriteSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileServerWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedFileServerWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java index 
494a4429a9f7..b3355f7434a5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileShareDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FileShareDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * An on-premises file system dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileShareDataset.class, visible = true) -@JsonTypeName("FileShare") @Fluent public final class FileShareDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileShare"; /* * On-premises file system dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private FileShareDatasetTypeProperties innerTypeProperties; /** @@ -304,4 +300,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileShareDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileShareDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FileShareDataset. 
+ */ + public static FileShareDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileShareDataset deserializedFileShareDataset = new FileShareDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedFileShareDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedFileShareDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedFileShareDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedFileShareDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedFileShareDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedFileShareDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedFileShareDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedFileShareDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedFileShareDataset.innerTypeProperties = FileShareDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileShareDataset.withAdditionalProperties(additionalProperties); + + return deserializedFileShareDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java index e6ad3cb20dc6..a8fa36970720 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity file system sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileSystemSink.class, visible = true) -@JsonTypeName("FileSystemSink") @Fluent public final class FileSystemSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileSystemSink"; /* * The type of copy behavior for copy sink. 
*/ - @JsonProperty(value = "copyBehavior") private Object copyBehavior; /** @@ -129,4 +126,72 @@ public FileSystemSink withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("copyBehavior", this.copyBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileSystemSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileSystemSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FileSystemSink. 
+ */ + public static FileSystemSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileSystemSink deserializedFileSystemSink = new FileSystemSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedFileSystemSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedFileSystemSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedFileSystemSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedFileSystemSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedFileSystemSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedFileSystemSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFileSystemSink.type = reader.getString(); + } else if ("copyBehavior".equals(fieldName)) { + deserializedFileSystemSink.copyBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileSystemSink.withAdditionalProperties(additionalProperties); + + return deserializedFileSystemSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java index ae7ecf951778..2624e6c43fb7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FileSystemSource.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity file system source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FileSystemSource.class, visible = true) -@JsonTypeName("FileSystemSource") @Fluent public final class FileSystemSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FileSystemSource"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public FileSystemSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FileSystemSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FileSystemSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FileSystemSource. 
+ */ + public static FileSystemSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FileSystemSource deserializedFileSystemSource = new FileSystemSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedFileSystemSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedFileSystemSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedFileSystemSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedFileSystemSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFileSystemSource.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedFileSystemSource.recursive = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedFileSystemSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFileSystemSource.withAdditionalProperties(additionalProperties); + + return deserializedFileSystemSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java index 837601e810da..585471f7b536 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FilterActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FilterActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Filter and return results from input array based on the conditions. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FilterActivity.class, visible = true) -@JsonTypeName("Filter") @Fluent public final class FilterActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Filter"; /* * Filter activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private FilterActivityTypeProperties innerTypeProperties = new FilterActivityTypeProperties(); /** @@ -176,4 +173,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FilterActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FilterActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FilterActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FilterActivity. 
+ */ + public static FilterActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FilterActivity deserializedFilterActivity = new FilterActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedFilterActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedFilterActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedFilterActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedFilterActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedFilterActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedFilterActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedFilterActivity.innerTypeProperties = FilterActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedFilterActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFilterActivity.withAdditionalProperties(additionalProperties); + + return deserializedFilterActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java index cafa494131df..1b61da905ec3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Flowlet.java @@ -5,31 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FlowletTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.util.List; /** * Data flow flowlet. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Flowlet.class, visible = true) -@JsonTypeName("Flowlet") @Fluent public final class Flowlet extends DataFlow { /* * Type of data flow. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Flowlet"; /* * Flowlet type properties. 
*/ - @JsonProperty(value = "typeProperties") private FlowletTypeProperties innerTypeProperties; /** @@ -211,4 +206,53 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Flowlet from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Flowlet if the JsonReader was pointing to an instance of it, or null if it was pointing to + * JSON null. + * @throws IOException If an error occurs while reading the Flowlet. 
+ */ + public static Flowlet fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Flowlet deserializedFlowlet = new Flowlet(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedFlowlet.withDescription(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedFlowlet.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedFlowlet.withFolder(DataFlowFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedFlowlet.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedFlowlet.innerTypeProperties = FlowletTypeProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedFlowlet; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java index 30242c36b49b..9df0306e2743 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ForEachActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ForEachActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity is used for iterating over a collection and execute given activities. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ForEachActivity.class, visible = true) -@JsonTypeName("ForEach") @Fluent public final class ForEachActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ForEach"; /* * ForEach activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ForEachActivityTypeProperties innerTypeProperties = new ForEachActivityTypeProperties(); /** @@ -224,4 +221,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ForEachActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ForEachActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ForEachActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ForEachActivity. 
+ */ + public static ForEachActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ForEachActivity deserializedForEachActivity = new ForEachActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedForEachActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedForEachActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedForEachActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedForEachActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedForEachActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedForEachActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedForEachActivity.innerTypeProperties = ForEachActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedForEachActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedForEachActivity.withAdditionalProperties(additionalProperties); + + return deserializedForEachActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java index 0de4ec838fb0..950d6463e379 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatReadSettings.java @@ -5,41 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Format read settings. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FormatReadSettings.class, visible = true) -@JsonTypeName("FormatReadSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "ParquetReadSettings", value = ParquetReadSettings.class), - @JsonSubTypes.Type(name = "DelimitedTextReadSettings", value = DelimitedTextReadSettings.class), - @JsonSubTypes.Type(name = "JsonReadSettings", value = JsonReadSettings.class), - @JsonSubTypes.Type(name = "XmlReadSettings", value = XmlReadSettings.class), - @JsonSubTypes.Type(name = "BinaryReadSettings", value = BinaryReadSettings.class) }) @Fluent -public class FormatReadSettings { +public class FormatReadSettings implements JsonSerializable { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FormatReadSettings"; /* * Format read settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -62,7 +48,6 @@ public String type() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -78,14 +63,6 @@ public FormatReadSettings withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -93,4 +70,84 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FormatReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FormatReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FormatReadSettings. + */ + public static FormatReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("ParquetReadSettings".equals(discriminatorValue)) { + return ParquetReadSettings.fromJson(readerToUse.reset()); + } else if ("DelimitedTextReadSettings".equals(discriminatorValue)) { + return DelimitedTextReadSettings.fromJson(readerToUse.reset()); + } else if ("JsonReadSettings".equals(discriminatorValue)) { + return JsonReadSettings.fromJson(readerToUse.reset()); + } else if ("XmlReadSettings".equals(discriminatorValue)) { + return XmlReadSettings.fromJson(readerToUse.reset()); + } else if ("BinaryReadSettings".equals(discriminatorValue)) { + return BinaryReadSettings.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static FormatReadSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FormatReadSettings deserializedFormatReadSettings = new FormatReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedFormatReadSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFormatReadSettings.additionalProperties = additionalProperties; + + return deserializedFormatReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java index b752954cb996..03a951863f88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FormatWriteSettings.java @@ -5,41 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Format write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FormatWriteSettings.class, visible = true) -@JsonTypeName("FormatWriteSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AvroWriteSettings", value = AvroWriteSettings.class), - @JsonSubTypes.Type(name = "OrcWriteSettings", value = OrcWriteSettings.class), - @JsonSubTypes.Type(name = "ParquetWriteSettings", value = ParquetWriteSettings.class), - @JsonSubTypes.Type(name = "DelimitedTextWriteSettings", value = DelimitedTextWriteSettings.class), - @JsonSubTypes.Type(name = "JsonWriteSettings", value = JsonWriteSettings.class) }) @Fluent -public class FormatWriteSettings { +public class FormatWriteSettings implements JsonSerializable { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FormatWriteSettings"; /* * Format write settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -62,7 +48,6 @@ public String type() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -78,14 +63,6 @@ public FormatWriteSettings withAdditionalProperties(Map addition return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -93,4 +70,84 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FormatWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FormatWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FormatWriteSettings. + */ + public static FormatWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("AvroWriteSettings".equals(discriminatorValue)) { + return AvroWriteSettings.fromJson(readerToUse.reset()); + } else if ("OrcWriteSettings".equals(discriminatorValue)) { + return OrcWriteSettings.fromJson(readerToUse.reset()); + } else if ("ParquetWriteSettings".equals(discriminatorValue)) { + return ParquetWriteSettings.fromJson(readerToUse.reset()); + } else if ("DelimitedTextWriteSettings".equals(discriminatorValue)) { + return DelimitedTextWriteSettings.fromJson(readerToUse.reset()); + } else if ("JsonWriteSettings".equals(discriminatorValue)) { + return JsonWriteSettings.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static FormatWriteSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FormatWriteSettings deserializedFormatWriteSettings = new FormatWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedFormatWriteSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFormatWriteSettings.additionalProperties = additionalProperties; + + return deserializedFormatWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FrequencyType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FrequencyType.java index ee9ff89fbae5..3616cecafce1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FrequencyType.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FrequencyType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public FrequencyType() { * @param name a name to look for. * @return the corresponding FrequencyType. */ - @JsonCreator public static FrequencyType fromString(String name) { return fromString(name, FrequencyType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpAuthenticationType.java index f47167cad2bc..f766de7c2cde 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public FtpAuthenticationType() { * @param name a name to look for. * @return the corresponding FtpAuthenticationType. 
*/ - @JsonCreator public static FtpAuthenticationType fromString(String name) { return fromString(name, FtpAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java index 3a5fd38b2c17..83d9f5668253 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpReadSettings.java @@ -5,82 +5,71 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Ftp read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpReadSettings.class, visible = true) -@JsonTypeName("FtpReadSettings") @Fluent public final class FtpReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FtpReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Ftp wildcardFolderPath. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Ftp wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "useBinaryTransfer") private Object useBinaryTransfer; /* * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with * resultType boolean). 
*/ - @JsonProperty(value = "disableChunking") private Object disableChunking; /** @@ -320,4 +309,84 @@ public FtpReadSettings withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("useBinaryTransfer", this.useBinaryTransfer); + jsonWriter.writeUntypedField("disableChunking", this.disableChunking); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FtpReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FtpReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FtpReadSettings. 
+ */ + public static FtpReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FtpReadSettings deserializedFtpReadSettings = new FtpReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedFtpReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedFtpReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFtpReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedFtpReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedFtpReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedFtpReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedFtpReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedFtpReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedFtpReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedFtpReadSettings.fileListPath = reader.readUntyped(); + } else if ("useBinaryTransfer".equals(fieldName)) { + deserializedFtpReadSettings.useBinaryTransfer = reader.readUntyped(); + } else if ("disableChunking".equals(fieldName)) { + deserializedFtpReadSettings.disableChunking = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFtpReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedFtpReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java index 9f7d3dc3fe85..56cd1c384bd4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.FtpServerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * A FTP server Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpServerLinkedService.class, visible = true) -@JsonTypeName("FtpServer") @Fluent public final class FtpServerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FtpServer"; /* * Properties specific to this linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private FtpServerLinkedServiceTypeProperties innerTypeProperties = new FtpServerLinkedServiceTypeProperties(); /** @@ -307,4 +303,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(FtpServerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FtpServerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of FtpServerLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the FtpServerLinkedService. 
+ */ + public static FtpServerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FtpServerLinkedService deserializedFtpServerLinkedService = new FtpServerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedFtpServerLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedFtpServerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedFtpServerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedFtpServerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedFtpServerLinkedService.innerTypeProperties + = FtpServerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedFtpServerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFtpServerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedFtpServerLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java index 49d946e697b5..1692214eb059 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/FtpServerLocation.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of ftp server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = FtpServerLocation.class, visible = true) -@JsonTypeName("FtpServerLocation") @Fluent public final class FtpServerLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "FtpServerLocation"; /** @@ -67,4 +65,57 @@ public FtpServerLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of FtpServerLocation from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of FtpServerLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the FtpServerLocation. + */ + public static FtpServerLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + FtpServerLocation deserializedFtpServerLocation = new FtpServerLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedFtpServerLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedFtpServerLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedFtpServerLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedFtpServerLocation.withAdditionalProperties(additionalProperties); + + return deserializedFtpServerLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetDataFactoryOperationStatusResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetDataFactoryOperationStatusResponse.java index 5019b76998c9..1c39a81e6d79 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetDataFactoryOperationStatusResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetDataFactoryOperationStatusResponse.java @@ -5,10 +5,11 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,17 +17,16 @@ * Response body structure for get data factory operation status. */ @Fluent -public final class GetDataFactoryOperationStatusResponse { +public final class GetDataFactoryOperationStatusResponse + implements JsonSerializable { /* * Status of the operation. */ - @JsonProperty(value = "status") private String status; /* * Response body structure for get data factory operation status. */ - @JsonIgnore private Map additionalProperties; /** @@ -60,7 +60,6 @@ public GetDataFactoryOperationStatusResponse withStatus(String status) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -76,14 +75,6 @@ public GetDataFactoryOperationStatusResponse withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -91,4 +82,52 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("status", this.status); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GetDataFactoryOperationStatusResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GetDataFactoryOperationStatusResponse if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GetDataFactoryOperationStatusResponse. + */ + public static GetDataFactoryOperationStatusResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GetDataFactoryOperationStatusResponse deserializedGetDataFactoryOperationStatusResponse + = new GetDataFactoryOperationStatusResponse(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("status".equals(fieldName)) { + deserializedGetDataFactoryOperationStatusResponse.status = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGetDataFactoryOperationStatusResponse.additionalProperties = additionalProperties; + + return deserializedGetDataFactoryOperationStatusResponse; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java index 9eb686446fbd..687f31acf661 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetMetadataActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GetMetadataActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Activity to get metadata of dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GetMetadataActivity.class, visible = true) -@JsonTypeName("GetMetadata") @Fluent public final class GetMetadataActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GetMetadata"; /* * GetMetadata activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private GetMetadataActivityTypeProperties innerTypeProperties = new GetMetadataActivityTypeProperties(); /** @@ -240,4 +237,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GetMetadataActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GetMetadataActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GetMetadataActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GetMetadataActivity. 
+ */ + public static GetMetadataActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GetMetadataActivity deserializedGetMetadataActivity = new GetMetadataActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedGetMetadataActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedGetMetadataActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedGetMetadataActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedGetMetadataActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedGetMetadataActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedGetMetadataActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedGetMetadataActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedGetMetadataActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedGetMetadataActivity.innerTypeProperties + = GetMetadataActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGetMetadataActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + 
} + } + deserializedGetMetadataActivity.withAdditionalProperties(additionalProperties); + + return deserializedGetMetadataActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetSsisObjectMetadataRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetSsisObjectMetadataRequest.java index f3828a585bd6..b7d67a8b94ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetSsisObjectMetadataRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GetSsisObjectMetadataRequest.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The request payload of get SSIS object metadata. */ @Fluent -public final class GetSsisObjectMetadataRequest { +public final class GetSsisObjectMetadataRequest implements JsonSerializable { /* * Metadata path. */ - @JsonProperty(value = "metadataPath") private String metadataPath; /** @@ -51,4 +54,40 @@ public GetSsisObjectMetadataRequest withMetadataPath(String metadataPath) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("metadataPath", this.metadataPath); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GetSsisObjectMetadataRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of GetSsisObjectMetadataRequest if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GetSsisObjectMetadataRequest. + */ + public static GetSsisObjectMetadataRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GetSsisObjectMetadataRequest deserializedGetSsisObjectMetadataRequest = new GetSsisObjectMetadataRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("metadataPath".equals(fieldName)) { + deserializedGetSsisObjectMetadataRequest.metadataPath = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGetSsisObjectMetadataRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java index 61ce7d9c9ddc..03216d9a954f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubAccessTokenRequest.java @@ -6,35 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Get GitHub access token request definition. 
*/ @Fluent -public final class GitHubAccessTokenRequest { +public final class GitHubAccessTokenRequest implements JsonSerializable { /* * GitHub access code. */ - @JsonProperty(value = "gitHubAccessCode", required = true) private String gitHubAccessCode; /* * GitHub application client ID. */ - @JsonProperty(value = "gitHubClientId") private String gitHubClientId; /* * GitHub bring your own app client secret information. */ - @JsonProperty(value = "gitHubClientSecret") private GitHubClientSecret gitHubClientSecret; /* * GitHub access token base URL. */ - @JsonProperty(value = "gitHubAccessTokenBaseUrl", required = true) private String gitHubAccessTokenBaseUrl; /** @@ -145,4 +145,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GitHubAccessTokenRequest.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("gitHubAccessCode", this.gitHubAccessCode); + jsonWriter.writeStringField("gitHubAccessTokenBaseUrl", this.gitHubAccessTokenBaseUrl); + jsonWriter.writeStringField("gitHubClientId", this.gitHubClientId); + jsonWriter.writeJsonField("gitHubClientSecret", this.gitHubClientSecret); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GitHubAccessTokenRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GitHubAccessTokenRequest if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GitHubAccessTokenRequest. 
+ */ + public static GitHubAccessTokenRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GitHubAccessTokenRequest deserializedGitHubAccessTokenRequest = new GitHubAccessTokenRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("gitHubAccessCode".equals(fieldName)) { + deserializedGitHubAccessTokenRequest.gitHubAccessCode = reader.getString(); + } else if ("gitHubAccessTokenBaseUrl".equals(fieldName)) { + deserializedGitHubAccessTokenRequest.gitHubAccessTokenBaseUrl = reader.getString(); + } else if ("gitHubClientId".equals(fieldName)) { + deserializedGitHubAccessTokenRequest.gitHubClientId = reader.getString(); + } else if ("gitHubClientSecret".equals(fieldName)) { + deserializedGitHubAccessTokenRequest.gitHubClientSecret = GitHubClientSecret.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedGitHubAccessTokenRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubClientSecret.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubClientSecret.java index 3565d6d88621..0115b2e02e50 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubClientSecret.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GitHubClientSecret.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Client secret information for 
factory's bring your own app repository configuration. */ @Fluent -public final class GitHubClientSecret { +public final class GitHubClientSecret implements JsonSerializable<GitHubClientSecret> { /* * Bring your own app client secret AKV URL. */ - @JsonProperty(value = "byoaSecretAkvUrl") private String byoaSecretAkvUrl; /* * Bring your own app client secret name in AKV. */ - @JsonProperty(value = "byoaSecretName") private String byoaSecretName; /** @@ -77,4 +79,43 @@ public GitHubClientSecret withByoaSecretName(String byoaSecretName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("byoaSecretAkvUrl", this.byoaSecretAkvUrl); + jsonWriter.writeStringField("byoaSecretName", this.byoaSecretName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GitHubClientSecret from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GitHubClientSecret if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the GitHubClientSecret. 
+ */ + public static GitHubClientSecret fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GitHubClientSecret deserializedGitHubClientSecret = new GitHubClientSecret(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("byoaSecretAkvUrl".equals(fieldName)) { + deserializedGitHubClientSecret.byoaSecretAkvUrl = reader.getString(); + } else if ("byoaSecretName".equals(fieldName)) { + deserializedGitHubClientSecret.byoaSecretName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGitHubClientSecret; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java index 76ed6216cfbc..68f72f9e8c91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GlobalParameterResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of Global parameters. */ @Fluent -public final class GlobalParameterListResponse { +public final class GlobalParameterListResponse implements JsonSerializable { /* * List of global parameters. 
 */ - @JsonProperty(value = "value", required = true) private List<GlobalParameterResourceInner> value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GlobalParameterListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GlobalParameterListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GlobalParameterListResponse if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GlobalParameterListResponse. 
 + */ + public static GlobalParameterListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GlobalParameterListResponse deserializedGlobalParameterListResponse = new GlobalParameterListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List<GlobalParameterResourceInner> value + = reader.readArray(reader1 -> GlobalParameterResourceInner.fromJson(reader1)); + deserializedGlobalParameterListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedGlobalParameterListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedGlobalParameterListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java index 9af2187fa49c..eea991d4537e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterSpecification.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Definition of a single parameter for an entity. */ @Fluent -public final class GlobalParameterSpecification { +public final class GlobalParameterSpecification implements JsonSerializable<GlobalParameterSpecification> { /* * Global Parameter type. 
*/ - @JsonProperty(value = "type", required = true) private GlobalParameterType type; /* * Value of parameter. */ - @JsonProperty(value = "value", required = true) private Object value; /** @@ -90,4 +92,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GlobalParameterSpecification.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GlobalParameterSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GlobalParameterSpecification if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GlobalParameterSpecification. 
+ */ + public static GlobalParameterSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GlobalParameterSpecification deserializedGlobalParameterSpecification = new GlobalParameterSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedGlobalParameterSpecification.type = GlobalParameterType.fromString(reader.getString()); + } else if ("value".equals(fieldName)) { + deserializedGlobalParameterSpecification.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedGlobalParameterSpecification; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterType.java index 6d6ca8edc3db..3122e9a15fb5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GlobalParameterType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -57,7 +56,6 @@ public GlobalParameterType() { * @param name a name to look for. * @return the corresponding GlobalParameterType. 
*/ - @JsonCreator public static GlobalParameterType fromString(String name) { return fromString(name, GlobalParameterType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsAuthenticationType.java index e902e3b56f1e..c1925ee3ba03 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -38,7 +37,6 @@ public GoogleAdWordsAuthenticationType() { * @param name a name to look for. * @return the corresponding GoogleAdWordsAuthenticationType. 
*/ - @JsonCreator public static GoogleAdWordsAuthenticationType fromString(String name) { return fromString(name, GoogleAdWordsAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java index b3cdb3e774c0..adcb79d818d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleAdWordsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google AdWords service linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleAdWordsLinkedService.class, - visible = true) -@JsonTypeName("GoogleAdWords") @Fluent public final class GoogleAdWordsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleAdWords"; /* * Google AdWords service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private GoogleAdWordsLinkedServiceTypeProperties innerTypeProperties = new GoogleAdWordsLinkedServiceTypeProperties(); @@ -528,4 +520,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleAdWordsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleAdWordsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleAdWordsLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleAdWordsLinkedService. 
+ */ + public static GoogleAdWordsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleAdWordsLinkedService deserializedGoogleAdWordsLinkedService = new GoogleAdWordsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGoogleAdWordsLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleAdWordsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleAdWordsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleAdWordsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleAdWordsLinkedService.innerTypeProperties + = GoogleAdWordsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGoogleAdWordsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleAdWordsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGoogleAdWordsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java index 
d05bc19136a8..c71c2e7f8991 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google AdWords service dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleAdWordsObjectDataset.class, - visible = true) -@JsonTypeName("GoogleAdWordsObject") @Fluent public final class GoogleAdWordsObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleAdWordsObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleAdWordsObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleAdWordsObjectDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleAdWordsObjectDataset. 
+ */ + public static GoogleAdWordsObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleAdWordsObjectDataset deserializedGoogleAdWordsObjectDataset = new GoogleAdWordsObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleAdWordsObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleAdWordsObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleAdWordsObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleAdWordsObjectDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedGoogleAdWordsObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java index c5af09052511..17f0ba430e8e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleAdWordsSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Google AdWords service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleAdWordsSource.class, visible = true) -@JsonTypeName("GoogleAdWordsSource") @Fluent public final class GoogleAdWordsSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleAdWordsSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public GoogleAdWordsSource withDisableMetricsCollection(Object disableMetricsCol public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleAdWordsSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleAdWordsSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleAdWordsSource. 
+ */ + public static GoogleAdWordsSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleAdWordsSource deserializedGoogleAdWordsSource = new GoogleAdWordsSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedGoogleAdWordsSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedGoogleAdWordsSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedGoogleAdWordsSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedGoogleAdWordsSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedGoogleAdWordsSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedGoogleAdWordsSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGoogleAdWordsSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedGoogleAdWordsSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleAdWordsSource.withAdditionalProperties(additionalProperties); + + return deserializedGoogleAdWordsSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryAuthenticationType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryAuthenticationType.java index da93f9ba657d..7c3d421071a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -38,7 +37,6 @@ public GoogleBigQueryAuthenticationType() { * @param name a name to look for. * @return the corresponding GoogleBigQueryAuthenticationType. */ - @JsonCreator public static GoogleBigQueryAuthenticationType fromString(String name) { return fromString(name, GoogleBigQueryAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java index 96bf6ecf8d6d..2cdece5b905e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryLinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google BigQuery service linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleBigQueryLinkedService.class, - visible = true) -@JsonTypeName("GoogleBigQuery") @Fluent public final class GoogleBigQueryLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQuery"; /* * Google BigQuery service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private GoogleBigQueryLinkedServiceTypeProperties innerTypeProperties = new GoogleBigQueryLinkedServiceTypeProperties(); @@ -420,4 +412,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleBigQueryLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** 
+ * Reads an instance of GoogleBigQueryLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryLinkedService. + */ + public static GoogleBigQueryLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryLinkedService deserializedGoogleBigQueryLinkedService = new GoogleBigQueryLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGoogleBigQueryLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleBigQueryLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleBigQueryLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleBigQueryLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleBigQueryLinkedService.innerTypeProperties + = GoogleBigQueryLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQueryLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedGoogleBigQueryLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQueryLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java index e5e76b17a3dc..4bbf2cdac45d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google BigQuery service dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleBigQueryObjectDataset.class, - visible = true) -@JsonTypeName("GoogleBigQueryObject") @Fluent public final class GoogleBigQueryObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQueryObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GoogleBigQueryDatasetTypeProperties innerTypeProperties; /** @@ -212,4 +204,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryObjectDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryObjectDataset. 
+ */ + public static GoogleBigQueryObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryObjectDataset deserializedGoogleBigQueryObjectDataset = new GoogleBigQueryObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleBigQueryObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleBigQueryObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleBigQueryObjectDataset.innerTypeProperties + = GoogleBigQueryDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedGoogleBigQueryObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQueryObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java index 5adec7c3e1d3..894811f26bfd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQuerySource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Google BigQuery service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleBigQuerySource.class, visible = true) -@JsonTypeName("GoogleBigQuerySource") @Fluent public final class GoogleBigQuerySource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQuerySource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public GoogleBigQuerySource withDisableMetricsCollection(Object disableMetricsCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQuerySource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQuerySource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleBigQuerySource. 
+ */ + public static GoogleBigQuerySource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQuerySource deserializedGoogleBigQuerySource = new GoogleBigQuerySource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedGoogleBigQuerySource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedGoogleBigQuerySource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedGoogleBigQuerySource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedGoogleBigQuerySource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedGoogleBigQuerySource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedGoogleBigQuerySource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQuerySource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedGoogleBigQuerySource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleBigQuerySource.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQuerySource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2AuthenticationType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2AuthenticationType.java index 94e26d6b5f4c..571b3e803b48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2AuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2AuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public GoogleBigQueryV2AuthenticationType() { * @param name a name to look for. * @return the corresponding GoogleBigQueryV2AuthenticationType. */ - @JsonCreator public static GoogleBigQueryV2AuthenticationType fromString(String name) { return fromString(name, GoogleBigQueryV2AuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java index 57e233526462..1ae580f169ce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2LinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google BigQuery service linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleBigQueryV2LinkedService.class, - visible = true) -@JsonTypeName("GoogleBigQueryV2") @Fluent public final class GoogleBigQueryV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQueryV2"; /* * Google BigQuery service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private GoogleBigQueryV2LinkedServiceTypeProperties innerTypeProperties = new GoogleBigQueryV2LinkedServiceTypeProperties(); @@ -289,4 +281,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleBigQueryV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return 
jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryV2LinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryV2LinkedService. + */ + public static GoogleBigQueryV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryV2LinkedService deserializedGoogleBigQueryV2LinkedService + = new GoogleBigQueryV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleBigQueryV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleBigQueryV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedService.innerTypeProperties + = GoogleBigQueryV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQueryV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new 
LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleBigQueryV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQueryV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java index a330f1fe846c..4928489b135a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2ObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2DatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Google BigQuery service dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleBigQueryV2ObjectDataset.class, - visible = true) -@JsonTypeName("GoogleBigQueryV2Object") @Fluent public final class GoogleBigQueryV2ObjectDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQueryV2Object"; /* * Properties specific to this dataset type. */ - @JsonProperty(value = "typeProperties") private GoogleBigQueryV2DatasetTypeProperties innerTypeProperties; /** @@ -187,4 +179,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryV2ObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryV2ObjectDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleBigQueryV2ObjectDataset. 
+ */ + public static GoogleBigQueryV2ObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryV2ObjectDataset deserializedGoogleBigQueryV2ObjectDataset + = new GoogleBigQueryV2ObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleBigQueryV2ObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleBigQueryV2ObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleBigQueryV2ObjectDataset.innerTypeProperties + = GoogleBigQueryV2DatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedGoogleBigQueryV2ObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQueryV2ObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java index 8e211f9afb14..a6bfa3475de9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleBigQueryV2Source.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Google BigQuery service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GoogleBigQueryV2Source.class, visible = true) -@JsonTypeName("GoogleBigQueryV2Source") @Fluent public final class GoogleBigQueryV2Source extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleBigQueryV2Source"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public GoogleBigQueryV2Source withDisableMetricsCollection(Object disableMetrics public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleBigQueryV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleBigQueryV2Source if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleBigQueryV2Source. 
+ */ + public static GoogleBigQueryV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleBigQueryV2Source deserializedGoogleBigQueryV2Source = new GoogleBigQueryV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedGoogleBigQueryV2Source.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleBigQueryV2Source.withAdditionalProperties(additionalProperties); + + return deserializedGoogleBigQueryV2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java index e8c13791f584..ea0a473c7c69 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleCloudStorageLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Google Cloud Storage. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleCloudStorageLinkedService.class, - visible = true) -@JsonTypeName("GoogleCloudStorage") @Fluent public final class GoogleCloudStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleCloudStorage"; /* * Google Cloud Storage linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private GoogleCloudStorageLinkedServiceTypeProperties innerTypeProperties = new GoogleCloudStorageLinkedServiceTypeProperties(); @@ -220,4 +212,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleCloudStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleCloudStorageLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleCloudStorageLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleCloudStorageLinkedService. 
+ */ + public static GoogleCloudStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleCloudStorageLinkedService deserializedGoogleCloudStorageLinkedService + = new GoogleCloudStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleCloudStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleCloudStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedService.innerTypeProperties + = GoogleCloudStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGoogleCloudStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleCloudStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGoogleCloudStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java index 3be7b45ae026..dc6b6c2d7ce8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageLocation.java @@ -5,39 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of Google Cloud Storage dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleCloudStorageLocation.class, - visible = true) -@JsonTypeName("GoogleCloudStorageLocation") @Fluent public final class GoogleCloudStorageLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleCloudStorageLocation"; /* * Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "bucketName") private Object bucketName; /* * Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "version") private Object version; /** @@ -127,4 +119,63 @@ public GoogleCloudStorageLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + jsonWriter.writeUntypedField("version", this.version); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleCloudStorageLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleCloudStorageLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the GoogleCloudStorageLocation. 
+ */ + public static GoogleCloudStorageLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleCloudStorageLocation deserializedGoogleCloudStorageLocation = new GoogleCloudStorageLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedGoogleCloudStorageLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedGoogleCloudStorageLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGoogleCloudStorageLocation.type = reader.getString(); + } else if ("bucketName".equals(fieldName)) { + deserializedGoogleCloudStorageLocation.bucketName = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedGoogleCloudStorageLocation.version = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleCloudStorageLocation.withAdditionalProperties(additionalProperties); + + return deserializedGoogleCloudStorageLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java index c5b2f4e12f1a..34d0ab8c1cc4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleCloudStorageReadSettings.java @@ -5,90 +5,74 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Google Cloud Storage read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleCloudStorageReadSettings.class, - visible = true) -@JsonTypeName("GoogleCloudStorageReadSettings") @Fluent public final class GoogleCloudStorageReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleCloudStorageReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -354,4 +338,88 @@ public GoogleCloudStorageReadSettings withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleCloudStorageReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleCloudStorageReadSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the GoogleCloudStorageReadSettings. + */ + public static GoogleCloudStorageReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleCloudStorageReadSettings deserializedGoogleCloudStorageReadSettings + = new GoogleCloudStorageReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + 
deserializedGoogleCloudStorageReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedGoogleCloudStorageReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleCloudStorageReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedGoogleCloudStorageReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java index 4c897c140b41..042ec6ab6860 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GoogleSheetsLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GoogleSheetsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked 
service for GoogleSheets. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = GoogleSheetsLinkedService.class, - visible = true) -@JsonTypeName("GoogleSheets") @Fluent public final class GoogleSheetsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GoogleSheets"; /* * GoogleSheets linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private GoogleSheetsLinkedServiceTypeProperties innerTypeProperties = new GoogleSheetsLinkedServiceTypeProperties(); /** @@ -165,4 +157,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GoogleSheetsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GoogleSheetsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GoogleSheetsLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GoogleSheetsLinkedService. + */ + public static GoogleSheetsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GoogleSheetsLinkedService deserializedGoogleSheetsLinkedService = new GoogleSheetsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGoogleSheetsLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGoogleSheetsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGoogleSheetsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGoogleSheetsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGoogleSheetsLinkedService.innerTypeProperties + = GoogleSheetsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGoogleSheetsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGoogleSheetsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGoogleSheetsLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java index 90130d74a953..c4d48a39ce34 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GreenplumLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Greenplum Database linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumLinkedService.class, visible = true) -@JsonTypeName("Greenplum") @Fluent public final class GreenplumLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Greenplum"; /* * Greenplum Database linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private GreenplumLinkedServiceTypeProperties innerTypeProperties = new GreenplumLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(GreenplumLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GreenplumLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GreenplumLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GreenplumLinkedService. 
+ */ + public static GreenplumLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GreenplumLinkedService deserializedGreenplumLinkedService = new GreenplumLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedGreenplumLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGreenplumLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGreenplumLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGreenplumLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedGreenplumLinkedService.innerTypeProperties + = GreenplumLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedGreenplumLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGreenplumLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedGreenplumLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java index 40b4ad95ce5c..1fdf10c25312 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Greenplum Database source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumSource.class, visible = true) -@JsonTypeName("GreenplumSource") @Fluent public final class GreenplumSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GreenplumSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public GreenplumSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GreenplumSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GreenplumSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the GreenplumSource. 
+ */ + public static GreenplumSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GreenplumSource deserializedGreenplumSource = new GreenplumSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedGreenplumSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedGreenplumSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedGreenplumSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedGreenplumSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedGreenplumSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedGreenplumSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedGreenplumSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedGreenplumSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGreenplumSource.withAdditionalProperties(additionalProperties); + + return deserializedGreenplumSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java index 7130b4907542..814b79a55b99 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/GreenplumTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GreenplumDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Greenplum Database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = GreenplumTableDataset.class, visible = true) -@JsonTypeName("GreenplumTable") @Fluent public final class GreenplumTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "GreenplumTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GreenplumDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of GreenplumTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of GreenplumTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the GreenplumTableDataset. 
+ */ + public static GreenplumTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + GreenplumTableDataset deserializedGreenplumTableDataset = new GreenplumTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedGreenplumTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedGreenplumTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedGreenplumTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedGreenplumTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedGreenplumTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedGreenplumTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedGreenplumTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedGreenplumTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedGreenplumTableDataset.innerTypeProperties + = GreenplumDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedGreenplumTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedGreenplumTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseAuthenticationType.java index b7d11f9b3c18..9b41b73639b0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public HBaseAuthenticationType() { * @param name a name to look for. * @return the corresponding HBaseAuthenticationType. */ - @JsonCreator public static HBaseAuthenticationType fromString(String name) { return fromString(name, HBaseAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java index b4ac3a167daa..9408aec6d092 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HBaseLinkedServiceTypeProperties; 
-import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HBase server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseLinkedService.class, visible = true) -@JsonTypeName("HBase") @Fluent public final class HBaseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HBase"; /* * HBase server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private HBaseLinkedServiceTypeProperties innerTypeProperties = new HBaseLinkedServiceTypeProperties(); /** @@ -382,4 +378,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HBaseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HBaseLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of HBaseLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HBaseLinkedService. + */ + public static HBaseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HBaseLinkedService deserializedHBaseLinkedService = new HBaseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHBaseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHBaseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHBaseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHBaseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHBaseLinkedService.innerTypeProperties + = HBaseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHBaseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHBaseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHBaseLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java index 1afb99c79cab..86211dc0b0c9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HBase server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseObjectDataset.class, visible = true) -@JsonTypeName("HBaseObject") @Fluent public final class HBaseObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HBaseObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HBaseObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HBaseObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HBaseObjectDataset. 
+ */ + public static HBaseObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HBaseObjectDataset deserializedHBaseObjectDataset = new HBaseObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedHBaseObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHBaseObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedHBaseObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedHBaseObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHBaseObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHBaseObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedHBaseObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedHBaseObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedHBaseObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHBaseObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedHBaseObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java index 44616b29f0ec..2619287419bb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HBaseSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity HBase server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HBaseSource.class, visible = true) -@JsonTypeName("HBaseSource") @Fluent public final class HBaseSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HBaseSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public HBaseSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HBaseSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HBaseSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HBaseSource. 
+ */ + public static HBaseSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HBaseSource deserializedHBaseSource = new HBaseSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedHBaseSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedHBaseSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHBaseSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHBaseSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedHBaseSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedHBaseSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHBaseSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedHBaseSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHBaseSource.withAdditionalProperties(additionalProperties); + + return deserializedHBaseSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightActivityDebugInfoOption.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightActivityDebugInfoOption.java index d412d236cce7..7fcf078f555d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightActivityDebugInfoOption.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightActivityDebugInfoOption.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public HDInsightActivityDebugInfoOption() { * @param name a name to look for. * @return the corresponding HDInsightActivityDebugInfoOption. */ - @JsonCreator public static HDInsightActivityDebugInfoOption fromString(String name) { return fromString(name, HDInsightActivityDebugInfoOption.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java index 0b1db815e4d5..3ba1844f5285 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightHiveActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightHiveActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight Hive activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightHiveActivity.class, visible = true) -@JsonTypeName("HDInsightHive") @Fluent public final class HDInsightHiveActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightHive"; /* * HDInsight Hive activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private HDInsightHiveActivityTypeProperties innerTypeProperties = new HDInsightHiveActivityTypeProperties(); /** @@ -335,4 +331,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightHiveActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightHiveActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightHiveActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightHiveActivity. 
+ */ + public static HDInsightHiveActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightHiveActivity deserializedHDInsightHiveActivity = new HDInsightHiveActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedHDInsightHiveActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedHDInsightHiveActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedHDInsightHiveActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedHDInsightHiveActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedHDInsightHiveActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedHDInsightHiveActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightHiveActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedHDInsightHiveActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightHiveActivity.innerTypeProperties + = HDInsightHiveActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightHiveActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightHiveActivity.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightHiveActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java index a8b83b9658e1..9e277fd3abb9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightLinkedService.class, visible = true) -@JsonTypeName("HDInsight") @Fluent public final class HDInsightLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsight"; /* * HDInsight linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HDInsightLinkedServiceTypeProperties innerTypeProperties = new HDInsightLinkedServiceTypeProperties(); /** @@ -305,4 +301,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightLinkedService. 
+ */ + public static HDInsightLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightLinkedService deserializedHDInsightLinkedService = new HDInsightLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHDInsightLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHDInsightLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHDInsightLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightLinkedService.innerTypeProperties + = HDInsightLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java index 5fe8d402a8da..7f457e922208 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightMapReduceActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightMapReduceActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight MapReduce activity type. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = HDInsightMapReduceActivity.class, - visible = true) -@JsonTypeName("HDInsightMapReduce") @Fluent public final class HDInsightMapReduceActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightMapReduce"; /* * HDInsight MapReduce activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HDInsightMapReduceActivityTypeProperties innerTypeProperties = new HDInsightMapReduceActivityTypeProperties(); @@ -338,4 +330,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightMapReduceActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightMapReduceActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightMapReduceActivity if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightMapReduceActivity. 
+ */ + public static HDInsightMapReduceActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightMapReduceActivity deserializedHDInsightMapReduceActivity = new HDInsightMapReduceActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedHDInsightMapReduceActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedHDInsightMapReduceActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedHDInsightMapReduceActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightMapReduceActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.innerTypeProperties + = HDInsightMapReduceActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightMapReduceActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightMapReduceActivity.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightMapReduceActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java index d2f71e13bd7e..2d3e82aa2112 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightOnDemandLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightOnDemandLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight ondemand linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = HDInsightOnDemandLinkedService.class, - visible = true) -@JsonTypeName("HDInsightOnDemand") @Fluent public final class HDInsightOnDemandLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightOnDemand"; /* * HDInsight ondemand linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private HDInsightOnDemandLinkedServiceTypeProperties innerTypeProperties = new HDInsightOnDemandLinkedServiceTypeProperties(); @@ -956,4 +948,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightOnDemandLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightOnDemandLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightOnDemandLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightOnDemandLinkedService. 
+ */ + public static HDInsightOnDemandLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightOnDemandLinkedService deserializedHDInsightOnDemandLinkedService + = new HDInsightOnDemandLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHDInsightOnDemandLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHDInsightOnDemandLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedService.innerTypeProperties + = HDInsightOnDemandLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightOnDemandLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightOnDemandLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightOnDemandLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java index 987bb38d8a2e..c544e5ff1882 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightPigActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightPigActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight Pig activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightPigActivity.class, visible = true) -@JsonTypeName("HDInsightPig") @Fluent public final class HDInsightPigActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightPig"; /* * HDInsight Pig activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HDInsightPigActivityTypeProperties innerTypeProperties = new HDInsightPigActivityTypeProperties(); /** @@ -289,4 +285,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightPigActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightPigActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightPigActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightPigActivity. 
+ */ + public static HDInsightPigActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightPigActivity deserializedHDInsightPigActivity = new HDInsightPigActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedHDInsightPigActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedHDInsightPigActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedHDInsightPigActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedHDInsightPigActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedHDInsightPigActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedHDInsightPigActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightPigActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedHDInsightPigActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightPigActivity.innerTypeProperties + = HDInsightPigActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightPigActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedHDInsightPigActivity.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightPigActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java index 205dd4775e7c..18378d99ee2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightSparkActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightSparkActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight Spark activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HDInsightSparkActivity.class, visible = true) -@JsonTypeName("HDInsightSpark") @Fluent public final class HDInsightSparkActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightSpark"; /* * HDInsight spark activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HDInsightSparkActivityTypeProperties innerTypeProperties = new HDInsightSparkActivityTypeProperties(); /** @@ -341,4 +337,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightSparkActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightSparkActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightSparkActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightSparkActivity. 
+ */ + public static HDInsightSparkActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightSparkActivity deserializedHDInsightSparkActivity = new HDInsightSparkActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedHDInsightSparkActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedHDInsightSparkActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedHDInsightSparkActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedHDInsightSparkActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedHDInsightSparkActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedHDInsightSparkActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightSparkActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedHDInsightSparkActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightSparkActivity.innerTypeProperties + = HDInsightSparkActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightSparkActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightSparkActivity.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightSparkActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java index 3b3d062b3694..b21d28438d02 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HDInsightStreamingActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HDInsightStreamingActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * HDInsight streaming activity type. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = HDInsightStreamingActivity.class, - visible = true) -@JsonTypeName("HDInsightStreaming") @Fluent public final class HDInsightStreamingActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HDInsightStreaming"; /* * HDInsight streaming activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HDInsightStreamingActivityTypeProperties innerTypeProperties = new HDInsightStreamingActivityTypeProperties(); @@ -430,4 +422,86 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HDInsightStreamingActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HDInsightStreamingActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HDInsightStreamingActivity if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HDInsightStreamingActivity. 
+ */ + public static HDInsightStreamingActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HDInsightStreamingActivity deserializedHDInsightStreamingActivity = new HDInsightStreamingActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedHDInsightStreamingActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedHDInsightStreamingActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedHDInsightStreamingActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedHDInsightStreamingActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedHDInsightStreamingActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedHDInsightStreamingActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedHDInsightStreamingActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedHDInsightStreamingActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedHDInsightStreamingActivity.innerTypeProperties + = HDInsightStreamingActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHDInsightStreamingActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHDInsightStreamingActivity.withAdditionalProperties(additionalProperties); + + return deserializedHDInsightStreamingActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java index 8d1dd4fbc6ae..72f1600edc97 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HdfsLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Hadoop Distributed File System (HDFS) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsLinkedService.class, visible = true) -@JsonTypeName("Hdfs") @Fluent public final class HdfsLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Hdfs"; /* * HDFS linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HdfsLinkedServiceTypeProperties innerTypeProperties = new HdfsLinkedServiceTypeProperties(); /** @@ -236,4 +232,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HdfsLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HdfsLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HdfsLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HdfsLinkedService. 
+ */ + public static HdfsLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HdfsLinkedService deserializedHdfsLinkedService = new HdfsLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHdfsLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHdfsLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHdfsLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHdfsLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHdfsLinkedService.innerTypeProperties + = HdfsLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHdfsLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHdfsLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHdfsLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java index 508003d8f009..d3f12d043dd4 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsLocation.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of HDFS. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsLocation.class, visible = true) -@JsonTypeName("HdfsLocation") @Fluent public final class HdfsLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HdfsLocation"; /** @@ -67,4 +65,57 @@ public HdfsLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HdfsLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of HdfsLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HdfsLocation. + */ + public static HdfsLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HdfsLocation deserializedHdfsLocation = new HdfsLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedHdfsLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedHdfsLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHdfsLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHdfsLocation.withAdditionalProperties(additionalProperties); + + return deserializedHdfsLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java index e629850499c3..504a7ebc0221 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsReadSettings.java @@ -5,86 +5,74 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * HDFS read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsReadSettings.class, visible = true) -@JsonTypeName("HdfsReadSettings") @Fluent public final class HdfsReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HdfsReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * HDFS wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * HDFS wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * Specifies Distcp-related settings. */ - @JsonProperty(value = "distcpSettings") private DistcpSettings distcpSettings; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /** @@ -349,4 +337,87 @@ public void validate() { distcpSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeJsonField("distcpSettings", this.distcpSettings); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) 
{ + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HdfsReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HdfsReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HdfsReadSettings. + */ + public static HdfsReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HdfsReadSettings deserializedHdfsReadSettings = new HdfsReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHdfsReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHdfsReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHdfsReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedHdfsReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedHdfsReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedHdfsReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedHdfsReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedHdfsReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedHdfsReadSettings.partitionRootPath = reader.readUntyped(); + } else if 
("modifiedDatetimeStart".equals(fieldName)) { + deserializedHdfsReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedHdfsReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("distcpSettings".equals(fieldName)) { + deserializedHdfsReadSettings.distcpSettings = DistcpSettings.fromJson(reader); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedHdfsReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHdfsReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedHdfsReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java index 553f43c75238..cb9086c30b6e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HdfsSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity HDFS source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HdfsSource.class, visible = true) -@JsonTypeName("HdfsSource") @Fluent public final class HdfsSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HdfsSource"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Specifies Distcp-related settings. */ - @JsonProperty(value = "distcpSettings") private DistcpSettings distcpSettings; /** @@ -143,4 +139,69 @@ public void validate() { distcpSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeJsonField("distcpSettings", this.distcpSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HdfsSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HdfsSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the HdfsSource. 
+ */ + public static HdfsSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HdfsSource deserializedHdfsSource = new HdfsSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedHdfsSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedHdfsSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHdfsSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHdfsSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHdfsSource.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedHdfsSource.recursive = reader.readUntyped(); + } else if ("distcpSettings".equals(fieldName)) { + deserializedHdfsSource.distcpSettings = DistcpSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHdfsSource.withAdditionalProperties(additionalProperties); + + return deserializedHdfsSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveAuthenticationType.java index 0908efa0a0c1..d7947fedf399 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveAuthenticationType.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -48,7 +47,6 @@ public HiveAuthenticationType() { * @param name a name to look for. * @return the corresponding HiveAuthenticationType. */ - @JsonCreator public static HiveAuthenticationType fromString(String name) { return fromString(name, HiveAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java index 38f16d66bc58..fcf1b1bee6e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HiveLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Hive Server linked service. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveLinkedService.class, visible = true) -@JsonTypeName("Hive") @Fluent public final class HiveLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Hive"; /* * Hive Server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private HiveLinkedServiceTypeProperties innerTypeProperties = new HiveLinkedServiceTypeProperties(); /** @@ -522,4 +518,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HiveLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HiveLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HiveLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HiveLinkedService. 
+ */ + public static HiveLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HiveLinkedService deserializedHiveLinkedService = new HiveLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHiveLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHiveLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHiveLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHiveLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHiveLinkedService.innerTypeProperties + = HiveLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHiveLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHiveLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHiveLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java index 603a9c43528f..cbf48beaea56 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HiveDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Hive Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveObjectDataset.class, visible = true) -@JsonTypeName("HiveObject") @Fluent public final class HiveObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HiveObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private HiveDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HiveObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HiveObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HiveObjectDataset. 
+ */ + public static HiveObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HiveObjectDataset deserializedHiveObjectDataset = new HiveObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedHiveObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHiveObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedHiveObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedHiveObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHiveObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHiveObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedHiveObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedHiveObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedHiveObjectDataset.innerTypeProperties = HiveDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHiveObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedHiveObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveServerType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveServerType.java index 6b86d1c369cc..f6ec6ed8541c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveServerType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveServerType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public HiveServerType() { * @param name a name to look for. * @return the corresponding HiveServerType. */ - @JsonCreator public static HiveServerType fromString(String name) { return fromString(name, HiveServerType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java index a16b33b7ebd7..95cc03cb773a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Hive Server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HiveSource.class, visible = true) -@JsonTypeName("HiveSource") @Fluent public final class HiveSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HiveSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public HiveSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HiveSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of HiveSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the HiveSource. + */ + public static HiveSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HiveSource deserializedHiveSource = new HiveSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedHiveSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedHiveSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHiveSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHiveSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedHiveSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedHiveSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHiveSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedHiveSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHiveSource.withAdditionalProperties(additionalProperties); + + return deserializedHiveSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveThriftTransportProtocol.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveThriftTransportProtocol.java index 3650dd46e236..7b938373fc8c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveThriftTransportProtocol.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HiveThriftTransportProtocol.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public HiveThriftTransportProtocol() { * @param name a name to look for. * @return the corresponding HiveThriftTransportProtocol. */ - @JsonCreator public static HiveThriftTransportProtocol fromString(String name) { return fromString(name, HiveThriftTransportProtocol.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpAuthenticationType.java index b43c210cc7fd..2cfc702a6d8c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public HttpAuthenticationType() { * @param name a name to look for. * @return the corresponding HttpAuthenticationType. 
*/ - @JsonCreator public static HttpAuthenticationType fromString(String name) { return fromString(name, HttpAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java index 67279aeb5671..1d8f39216c25 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HttpDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * A file in an HTTP web server. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpDataset.class, visible = true) -@JsonTypeName("HttpFile") @Fluent public final class HttpDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HttpFile"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private HttpDatasetTypeProperties innerTypeProperties; /** @@ -281,4 +277,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HttpDataset. 
+ */ + public static HttpDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpDataset deserializedHttpDataset = new HttpDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedHttpDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHttpDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedHttpDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedHttpDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHttpDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHttpDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedHttpDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedHttpDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedHttpDataset.innerTypeProperties = HttpDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHttpDataset.withAdditionalProperties(additionalProperties); + + return deserializedHttpDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java index 3e0cdb30a5ea..a9e798809e4d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HttpLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for an HTTP source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpLinkedService.class, visible = true) -@JsonTypeName("HttpServer") @Fluent public final class HttpLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HttpServer"; /* * Properties specific to this linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HttpLinkedServiceTypeProperties innerTypeProperties = new HttpLinkedServiceTypeProperties(); /** @@ -342,4 +338,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HttpLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HttpLinkedService. 
+ */ + public static HttpLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpLinkedService deserializedHttpLinkedService = new HttpLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHttpLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHttpLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHttpLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHttpLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHttpLinkedService.innerTypeProperties + = HttpLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHttpLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHttpLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHttpLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java index ef562a3e861e..0583b093e9dc 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpReadSettings.java @@ -5,58 +5,51 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Http read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpReadSettings.class, visible = true) -@JsonTypeName("HttpReadSettings") @Fluent public final class HttpReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HttpReadSettings"; /* * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestMethod") private Object requestMethod; /* * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestBody") private Object requestBody; /* * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "requestTimeout") private Object requestTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -212,4 +205,72 @@ public HttpReadSettings withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("requestMethod", this.requestMethod); + jsonWriter.writeUntypedField("requestBody", this.requestBody); + jsonWriter.writeUntypedField("additionalHeaders", this.additionalHeaders); + jsonWriter.writeUntypedField("requestTimeout", this.requestTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HttpReadSettings. 
+ */ + public static HttpReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpReadSettings deserializedHttpReadSettings = new HttpReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHttpReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHttpReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHttpReadSettings.type = reader.getString(); + } else if ("requestMethod".equals(fieldName)) { + deserializedHttpReadSettings.requestMethod = reader.readUntyped(); + } else if ("requestBody".equals(fieldName)) { + deserializedHttpReadSettings.requestBody = reader.readUntyped(); + } else if ("additionalHeaders".equals(fieldName)) { + deserializedHttpReadSettings.additionalHeaders = reader.readUntyped(); + } else if ("requestTimeout".equals(fieldName)) { + deserializedHttpReadSettings.requestTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedHttpReadSettings.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHttpReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedHttpReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java index b7a1e53eddf0..4d2a5570dba2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpServerLocation.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of http server. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpServerLocation.class, visible = true) -@JsonTypeName("HttpServerLocation") @Fluent public final class HttpServerLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HttpServerLocation"; /* * Specify the relativeUrl of http server. 
Type: string (or Expression with resultType string) */ - @JsonProperty(value = "relativeUrl") private Object relativeUrl; /** @@ -95,4 +92,60 @@ public HttpServerLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("relativeUrl", this.relativeUrl); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpServerLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpServerLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HttpServerLocation. 
+ */ + public static HttpServerLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpServerLocation deserializedHttpServerLocation = new HttpServerLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedHttpServerLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedHttpServerLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHttpServerLocation.type = reader.getString(); + } else if ("relativeUrl".equals(fieldName)) { + deserializedHttpServerLocation.relativeUrl = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHttpServerLocation.withAdditionalProperties(additionalProperties); + + return deserializedHttpServerLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java index 7ea135957564..5c117e27555c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpSource.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for an HTTP file. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HttpSource.class, visible = true) -@JsonTypeName("HttpSource") @Fluent public final class HttpSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HttpSource"; /* @@ -29,7 +27,6 @@ public final class HttpSource extends CopySource { * System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -117,4 +114,66 @@ public HttpSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HttpSource from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of HttpSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the HttpSource. + */ + public static HttpSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HttpSource deserializedHttpSource = new HttpSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedHttpSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedHttpSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHttpSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHttpSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHttpSource.type = reader.getString(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedHttpSource.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHttpSource.withAdditionalProperties(additionalProperties); + + return deserializedHttpSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java index d9b06fda1477..3b520d27f41d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.HubspotLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Hubspot Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotLinkedService.class, visible = true) -@JsonTypeName("Hubspot") @Fluent public final class HubspotLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Hubspot"; /* * Hubspot Service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private HubspotLinkedServiceTypeProperties innerTypeProperties = new HubspotLinkedServiceTypeProperties(); /** @@ -305,4 +301,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(HubspotLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HubspotLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HubspotLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HubspotLinkedService. 
+ */ + public static HubspotLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HubspotLinkedService deserializedHubspotLinkedService = new HubspotLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedHubspotLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHubspotLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHubspotLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHubspotLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedHubspotLinkedService.innerTypeProperties + = HubspotLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedHubspotLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHubspotLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedHubspotLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java index b76f907c3d13..6e413510cde3 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Hubspot Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotObjectDataset.class, visible = true) -@JsonTypeName("HubspotObject") @Fluent public final class HubspotObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HubspotObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HubspotObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HubspotObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the HubspotObjectDataset. 
+ */ + public static HubspotObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HubspotObjectDataset deserializedHubspotObjectDataset = new HubspotObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedHubspotObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedHubspotObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedHubspotObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedHubspotObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedHubspotObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedHubspotObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedHubspotObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedHubspotObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedHubspotObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHubspotObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedHubspotObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java index 275913bd28bf..1c9f7de2c6c2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HubspotSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Hubspot Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = HubspotSource.class, visible = true) -@JsonTypeName("HubspotSource") @Fluent public final class HubspotSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "HubspotSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public HubspotSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of HubspotSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of HubspotSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the HubspotSource. 
+ */ + public static HubspotSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + HubspotSource deserializedHubspotSource = new HubspotSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedHubspotSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedHubspotSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedHubspotSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedHubspotSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedHubspotSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedHubspotSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedHubspotSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedHubspotSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedHubspotSource.withAdditionalProperties(additionalProperties); + + return deserializedHubspotSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java index 3372a30cd388..b0e5e04caf66 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IfConditionActivity.java @@ -6,32 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.IfConditionActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property * or the ifFalseActivities property depending on the result of the expression. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = IfConditionActivity.class, visible = true) -@JsonTypeName("IfCondition") @Fluent public final class IfConditionActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "IfCondition"; /* * IfCondition activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private IfConditionActivityTypeProperties innerTypeProperties = new IfConditionActivityTypeProperties(); /** @@ -206,4 +203,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IfConditionActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IfConditionActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IfConditionActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IfConditionActivity. 
+ */ + public static IfConditionActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IfConditionActivity deserializedIfConditionActivity = new IfConditionActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedIfConditionActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedIfConditionActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedIfConditionActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedIfConditionActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedIfConditionActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedIfConditionActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedIfConditionActivity.innerTypeProperties + = IfConditionActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedIfConditionActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIfConditionActivity.withAdditionalProperties(additionalProperties); + + return deserializedIfConditionActivity; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaAuthenticationType.java index f0613d8bd132..8ab6a8e42ccc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public ImpalaAuthenticationType() { * @param name a name to look for. * @return the corresponding ImpalaAuthenticationType. */ - @JsonCreator public static ImpalaAuthenticationType fromString(String name) { return fromString(name, ImpalaAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java index 429551efeb42..9147e195c564 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.ImpalaLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Impala server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaLinkedService.class, visible = true) -@JsonTypeName("Impala") @Fluent public final class ImpalaLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Impala"; /* * Impala server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ImpalaLinkedServiceTypeProperties innerTypeProperties = new ImpalaLinkedServiceTypeProperties(); /** @@ -384,4 +380,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ImpalaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return 
jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImpalaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImpalaLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ImpalaLinkedService. + */ + public static ImpalaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImpalaLinkedService deserializedImpalaLinkedService = new ImpalaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedImpalaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedImpalaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedImpalaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedImpalaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedImpalaLinkedService.innerTypeProperties + = ImpalaLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedImpalaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedImpalaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedImpalaLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java index 584b747050ff..7989b5301694 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ImpalaDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Impala server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaObjectDataset.class, visible = true) -@JsonTypeName("ImpalaObject") @Fluent public final class ImpalaObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ImpalaObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private ImpalaDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImpalaObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImpalaObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ImpalaObjectDataset. 
+ */ + public static ImpalaObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImpalaObjectDataset deserializedImpalaObjectDataset = new ImpalaObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedImpalaObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedImpalaObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedImpalaObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedImpalaObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedImpalaObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedImpalaObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedImpalaObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedImpalaObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedImpalaObjectDataset.innerTypeProperties = ImpalaDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedImpalaObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedImpalaObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java index de89e35bbc93..0ce680624558 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImpalaSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Impala server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImpalaSource.class, visible = true) -@JsonTypeName("ImpalaSource") @Fluent public final class ImpalaSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ImpalaSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ImpalaSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImpalaSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImpalaSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ImpalaSource. 
+ */ + public static ImpalaSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImpalaSource deserializedImpalaSource = new ImpalaSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedImpalaSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedImpalaSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedImpalaSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedImpalaSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedImpalaSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedImpalaSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedImpalaSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedImpalaSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedImpalaSource.withAdditionalProperties(additionalProperties); + + return deserializedImpalaSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java index d38906d55115..127ff9668fde 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ImportSettings.java @@ -5,40 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Import command settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ImportSettings.class, visible = true) -@JsonTypeName("ImportSettings") -@JsonSubTypes({ - @JsonSubTypes.Type( - name = "AzureDatabricksDeltaLakeImportCommand", - value = AzureDatabricksDeltaLakeImportCommand.class), - @JsonSubTypes.Type(name = "SnowflakeImportCopyCommand", value = SnowflakeImportCopyCommand.class) }) @Fluent -public class ImportSettings { +public class ImportSettings implements JsonSerializable { /* * The import setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ImportSettings"; /* * Import command settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -61,7 +48,6 @@ public String type() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -77,14 +63,6 @@ public ImportSettings withAdditionalProperties(Map additionalPro return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -92,4 +70,78 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ImportSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ImportSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ImportSettings. + */ + public static ImportSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("AzureDatabricksDeltaLakeImportCommand".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeImportCommand.fromJson(readerToUse.reset()); + } else if ("SnowflakeImportCopyCommand".equals(discriminatorValue)) { + return SnowflakeImportCopyCommand.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static ImportSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ImportSettings deserializedImportSettings = new ImportSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedImportSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedImportSettings.additionalProperties = additionalProperties; + + return deserializedImportSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java index e4bec2839c77..a316bf3b1aef 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.InformixLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Informix linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixLinkedService.class, visible = true) -@JsonTypeName("Informix") @Fluent public final class InformixLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Informix"; /* * Informix linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private InformixLinkedServiceTypeProperties innerTypeProperties = new InformixLinkedServiceTypeProperties(); /** @@ -263,4 +259,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(InformixLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return 
jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the InformixLinkedService. + */ + public static InformixLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixLinkedService deserializedInformixLinkedService = new InformixLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedInformixLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedInformixLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedInformixLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedInformixLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedInformixLinkedService.innerTypeProperties + = InformixLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedInformixLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedInformixLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedInformixLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java index 9611e7933ad6..04711eb31826 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Informix sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixSink.class, visible = true) -@JsonTypeName("InformixSink") @Fluent public final class InformixSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "InformixSink"; /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -131,4 +128,72 @@ public InformixSink withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the InformixSink. 
+ */ + public static InformixSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixSink deserializedInformixSink = new InformixSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedInformixSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedInformixSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedInformixSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedInformixSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedInformixSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedInformixSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedInformixSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedInformixSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedInformixSink.withAdditionalProperties(additionalProperties); + + return deserializedInformixSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java index 3c77b3b4b2ad..45c471071124 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Informix. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixSource.class, visible = true) -@JsonTypeName("InformixSource") @Fluent public final class InformixSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "InformixSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public InformixSource withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the InformixSource. 
+ */ + public static InformixSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixSource deserializedInformixSource = new InformixSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedInformixSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedInformixSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedInformixSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedInformixSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedInformixSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedInformixSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedInformixSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedInformixSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedInformixSource.withAdditionalProperties(additionalProperties); + + return deserializedInformixSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java index dc036bdc561e..94bd03d353dd 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/InformixTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.InformixTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Informix table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = InformixTableDataset.class, visible = true) -@JsonTypeName("InformixTable") @Fluent public final class InformixTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "InformixTable"; /* * Informix table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private InformixTableDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of InformixTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of InformixTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the InformixTableDataset. 
+ */ + public static InformixTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + InformixTableDataset deserializedInformixTableDataset = new InformixTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedInformixTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedInformixTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedInformixTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedInformixTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedInformixTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedInformixTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedInformixTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedInformixTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedInformixTableDataset.innerTypeProperties + = InformixTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedInformixTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedInformixTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java index aa4c50e4e6f6..aed05b1e19ba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntime.java @@ -5,44 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Azure Data Factory nested object which serves as a compute resource for activities. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = IntegrationRuntime.class, visible = true) -@JsonTypeName("IntegrationRuntime") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Managed", value = ManagedIntegrationRuntime.class), - @JsonSubTypes.Type(name = "SelfHosted", value = SelfHostedIntegrationRuntime.class) }) @Fluent -public class IntegrationRuntime { +public class IntegrationRuntime implements JsonSerializable { /* * Type of integration runtime. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.fromString("IntegrationRuntime"); /* * Integration runtime description. */ - @JsonProperty(value = "description") private String description; /* * Azure Data Factory nested object which serves as a compute resource for activities. */ - @JsonIgnore private Map additionalProperties; /** @@ -86,7 +74,6 @@ public IntegrationRuntime withDescription(String description) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -103,14 +90,6 @@ public IntegrationRuntime withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -118,4 +97,81 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("description", this.description); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntime from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntime if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntime. 
+ */ + public static IntegrationRuntime fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("Managed".equals(discriminatorValue)) { + return ManagedIntegrationRuntime.fromJson(readerToUse.reset()); + } else if ("SelfHosted".equals(discriminatorValue)) { + return SelfHostedIntegrationRuntime.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static IntegrationRuntime fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntime deserializedIntegrationRuntime = new IntegrationRuntime(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedIntegrationRuntime.type = IntegrationRuntimeType.fromString(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedIntegrationRuntime.description = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntime.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntime; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAuthKeyName.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAuthKeyName.java index 6851f8d2ef03..f1d50a46bedc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAuthKeyName.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAuthKeyName.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public IntegrationRuntimeAuthKeyName() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeAuthKeyName. */ - @JsonCreator public static IntegrationRuntimeAuthKeyName fromString(String name) { return fromString(name, IntegrationRuntimeAuthKeyName.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAutoUpdate.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAutoUpdate.java index 1cfe7a300d90..84354c6adb0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAutoUpdate.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeAutoUpdate.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 
+36,6 @@ public IntegrationRuntimeAutoUpdate() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeAutoUpdate. */ - @JsonCreator public static IntegrationRuntimeAutoUpdate fromString(String name) { return fromString(name, IntegrationRuntimeAutoUpdate.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java index 82f146f7080b..032d7dda037f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeComputeProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,60 +17,52 @@ * The compute resource properties for managed integration runtime. */ @Fluent -public final class IntegrationRuntimeComputeProperties { +public final class IntegrationRuntimeComputeProperties + implements JsonSerializable { /* * The location for managed integration runtime. 
The supported regions could be found on * https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities */ - @JsonProperty(value = "location") private String location; /* * The node size requirement to managed integration runtime. */ - @JsonProperty(value = "nodeSize") private String nodeSize; /* * The required number of nodes for managed integration runtime. */ - @JsonProperty(value = "numberOfNodes") private Integer numberOfNodes; /* * Maximum parallel executions count per node for managed integration runtime. */ - @JsonProperty(value = "maxParallelExecutionsPerNode") private Integer maxParallelExecutionsPerNode; /* * Data flow properties for managed integration runtime. */ - @JsonProperty(value = "dataFlowProperties") private IntegrationRuntimeDataFlowProperties dataFlowProperties; /* * VNet properties for managed integration runtime. */ - @JsonProperty(value = "vNetProperties") private IntegrationRuntimeVNetProperties vNetProperties; /* * CopyComputeScale properties for managed integration runtime. */ - @JsonProperty(value = "copyComputeScaleProperties") private CopyComputeScaleProperties copyComputeScaleProperties; /* * PipelineExternalComputeScale properties for managed integration runtime. */ - @JsonProperty(value = "pipelineExternalComputeScaleProperties") private PipelineExternalComputeScaleProperties pipelineExternalComputeScaleProperties; /* * The compute resource properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -252,7 +245,6 @@ public IntegrationRuntimeComputeProperties withPipelineExternalComputeScalePrope * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -268,14 +260,6 @@ public IntegrationRuntimeComputeProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -295,4 +279,80 @@ public void validate() { pipelineExternalComputeScaleProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("location", this.location); + jsonWriter.writeStringField("nodeSize", this.nodeSize); + jsonWriter.writeNumberField("numberOfNodes", this.numberOfNodes); + jsonWriter.writeNumberField("maxParallelExecutionsPerNode", this.maxParallelExecutionsPerNode); + jsonWriter.writeJsonField("dataFlowProperties", this.dataFlowProperties); + jsonWriter.writeJsonField("vNetProperties", this.vNetProperties); + jsonWriter.writeJsonField("copyComputeScaleProperties", this.copyComputeScaleProperties); + jsonWriter.writeJsonField("pipelineExternalComputeScaleProperties", + this.pipelineExternalComputeScaleProperties); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeComputeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeComputeProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeComputeProperties. 
+ */ + public static IntegrationRuntimeComputeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeComputeProperties deserializedIntegrationRuntimeComputeProperties + = new IntegrationRuntimeComputeProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("location".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.location = reader.getString(); + } else if ("nodeSize".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.nodeSize = reader.getString(); + } else if ("numberOfNodes".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.numberOfNodes + = reader.getNullable(JsonReader::getInt); + } else if ("maxParallelExecutionsPerNode".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.maxParallelExecutionsPerNode + = reader.getNullable(JsonReader::getInt); + } else if ("dataFlowProperties".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.dataFlowProperties + = IntegrationRuntimeDataFlowProperties.fromJson(reader); + } else if ("vNetProperties".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.vNetProperties + = IntegrationRuntimeVNetProperties.fromJson(reader); + } else if ("copyComputeScaleProperties".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.copyComputeScaleProperties + = CopyComputeScaleProperties.fromJson(reader); + } else if ("pipelineExternalComputeScaleProperties".equals(fieldName)) { + deserializedIntegrationRuntimeComputeProperties.pipelineExternalComputeScaleProperties + = PipelineExternalComputeScaleProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedIntegrationRuntimeComputeProperties.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeComputeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomSetupScriptProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomSetupScriptProperties.java index 5fae2081c496..3d4e9319ca5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomSetupScriptProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomSetupScriptProperties.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Custom setup script properties for a managed dedicated integration runtime. */ @Fluent -public final class IntegrationRuntimeCustomSetupScriptProperties { +public final class IntegrationRuntimeCustomSetupScriptProperties + implements JsonSerializable<IntegrationRuntimeCustomSetupScriptProperties> { /* * The URI of the Azure blob container that contains the custom setup script. */ - @JsonProperty(value = "blobContainerUri") private String blobContainerUri; /* * The SAS token of the Azure blob container.
*/ - @JsonProperty(value = "sasToken") private SecureString sasToken; /** @@ -80,4 +83,44 @@ public void validate() { sasToken().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("blobContainerUri", this.blobContainerUri); + jsonWriter.writeJsonField("sasToken", this.sasToken); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeCustomSetupScriptProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeCustomSetupScriptProperties if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeCustomSetupScriptProperties. + */ + public static IntegrationRuntimeCustomSetupScriptProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeCustomSetupScriptProperties deserializedIntegrationRuntimeCustomSetupScriptProperties + = new IntegrationRuntimeCustomSetupScriptProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("blobContainerUri".equals(fieldName)) { + deserializedIntegrationRuntimeCustomSetupScriptProperties.blobContainerUri = reader.getString(); + } else if ("sasToken".equals(fieldName)) { + deserializedIntegrationRuntimeCustomSetupScriptProperties.sasToken = SecureString.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeCustomSetupScriptProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomerVirtualNetwork.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomerVirtualNetwork.java index 4f1172e856d7..88b3eb8f9106 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomerVirtualNetwork.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeCustomerVirtualNetwork.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The definition and properties of virtual network to which Azure-SSIS integration runtime will join. */ @Fluent -public final class IntegrationRuntimeCustomerVirtualNetwork { +public final class IntegrationRuntimeCustomerVirtualNetwork + implements JsonSerializable { /* * The ID of subnet to which Azure-SSIS integration runtime will join. */ - @JsonProperty(value = "subnetId") private String subnetId; /** @@ -51,4 +55,41 @@ public IntegrationRuntimeCustomerVirtualNetwork withSubnetId(String subnetId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("subnetId", this.subnetId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeCustomerVirtualNetwork from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeCustomerVirtualNetwork if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the IntegrationRuntimeCustomerVirtualNetwork. + */ + public static IntegrationRuntimeCustomerVirtualNetwork fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeCustomerVirtualNetwork deserializedIntegrationRuntimeCustomerVirtualNetwork + = new IntegrationRuntimeCustomerVirtualNetwork(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("subnetId".equals(fieldName)) { + deserializedIntegrationRuntimeCustomerVirtualNetwork.subnetId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeCustomerVirtualNetwork; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java index 134125c54618..6d15ae985d00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import 
java.util.List; import java.util.Map; @@ -17,42 +18,37 @@ * Data flow properties for managed integration runtime. */ @Fluent -public final class IntegrationRuntimeDataFlowProperties { +public final class IntegrationRuntimeDataFlowProperties + implements JsonSerializable { /* * Compute type of the cluster which will execute data flow job. */ - @JsonProperty(value = "computeType") private DataFlowComputeType computeType; /* * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. */ - @JsonProperty(value = "coreCount") private Integer coreCount; /* * Time to live (in minutes) setting of the cluster which will execute data flow job. */ - @JsonProperty(value = "timeToLive") private Integer timeToLive; /* * Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is * reached if this is set as false. Default is true. */ - @JsonProperty(value = "cleanup") private Boolean cleanup; /* * Custom properties are used to tune the data flow runtime performance. */ - @JsonProperty(value = "customProperties") private List customProperties; /* * Data flow properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -171,7 +167,6 @@ public List customProp * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -187,14 +182,6 @@ public IntegrationRuntimeDataFlowProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -205,4 +192,70 @@ public void validate() { customProperties().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("computeType", this.computeType == null ? 
null : this.computeType.toString()); + jsonWriter.writeNumberField("coreCount", this.coreCount); + jsonWriter.writeNumberField("timeToLive", this.timeToLive); + jsonWriter.writeBooleanField("cleanup", this.cleanup); + jsonWriter.writeArrayField("customProperties", this.customProperties, + (writer, element) -> writer.writeJson(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeDataFlowProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeDataFlowProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeDataFlowProperties. + */ + public static IntegrationRuntimeDataFlowProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeDataFlowProperties deserializedIntegrationRuntimeDataFlowProperties + = new IntegrationRuntimeDataFlowProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("computeType".equals(fieldName)) { + deserializedIntegrationRuntimeDataFlowProperties.computeType + = DataFlowComputeType.fromString(reader.getString()); + } else if ("coreCount".equals(fieldName)) { + deserializedIntegrationRuntimeDataFlowProperties.coreCount = reader.getNullable(JsonReader::getInt); + } else if ("timeToLive".equals(fieldName)) { + deserializedIntegrationRuntimeDataFlowProperties.timeToLive + = reader.getNullable(JsonReader::getInt); + } else if ("cleanup".equals(fieldName)) { + 
deserializedIntegrationRuntimeDataFlowProperties.cleanup + = reader.getNullable(JsonReader::getBoolean); + } else if ("customProperties".equals(fieldName)) { + List customProperties = reader.readArray( + reader1 -> IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.fromJson(reader1)); + deserializedIntegrationRuntimeDataFlowProperties.customProperties = customProperties; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeDataFlowProperties.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeDataFlowProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.java index 0a07043c2b11..3f73896e2bab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem model. 
*/ @Fluent -public final class IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem { +public final class IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem + implements JsonSerializable<IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem> { /* * Name of custom property. */ - @JsonProperty(value = "name") private String name; /* * Value of custom property. */ - @JsonProperty(value = "value") private String value; /** @@ -77,4 +80,46 @@ public IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem withValue(String */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem if the JsonReader was pointing to + * an instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the + * IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.
+ */ + public static IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem deserializedIntegrationRuntimeDataFlowPropertiesCustomPropertiesItem + = new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedIntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.name = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedIntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.value = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeDataFlowPropertiesCustomPropertiesItem; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataProxyProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataProxyProperties.java index ed8a5e12be28..6e9387b37d66 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataProxyProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDataProxyProperties.java @@ -5,29 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Data proxy properties for a managed dedicated integration runtime. 
*/ @Fluent -public final class IntegrationRuntimeDataProxyProperties { +public final class IntegrationRuntimeDataProxyProperties + implements JsonSerializable { /* * The self-hosted integration runtime reference. */ - @JsonProperty(value = "connectVia") private EntityReference connectVia; /* * The staging linked service reference. */ - @JsonProperty(value = "stagingLinkedService") private EntityReference stagingLinkedService; /* * The path to contain the staged data in the Blob storage. */ - @JsonProperty(value = "path") private String path; /** @@ -109,4 +111,48 @@ public void validate() { stagingLinkedService().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", this.connectVia); + jsonWriter.writeJsonField("stagingLinkedService", this.stagingLinkedService); + jsonWriter.writeStringField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeDataProxyProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeDataProxyProperties if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeDataProxyProperties. 
+ */ + public static IntegrationRuntimeDataProxyProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeDataProxyProperties deserializedIntegrationRuntimeDataProxyProperties + = new IntegrationRuntimeDataProxyProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedIntegrationRuntimeDataProxyProperties.connectVia = EntityReference.fromJson(reader); + } else if ("stagingLinkedService".equals(fieldName)) { + deserializedIntegrationRuntimeDataProxyProperties.stagingLinkedService + = EntityReference.fromJson(reader); + } else if ("path".equals(fieldName)) { + deserializedIntegrationRuntimeDataProxyProperties.path = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeDataProxyProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java index 5e5e063d21b8..407f81e60f8d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeDebugResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Integration runtime debug resource. 
@@ -16,7 +19,6 @@ public final class IntegrationRuntimeDebugResource extends SubResourceDebugResou /* * Integration runtime properties. */ - @JsonProperty(value = "properties", required = true) private IntegrationRuntime properties; /** @@ -72,4 +74,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IntegrationRuntimeDebugResource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeDebugResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeDebugResource if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IntegrationRuntimeDebugResource. 
+ */ + public static IntegrationRuntimeDebugResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeDebugResource deserializedIntegrationRuntimeDebugResource + = new IntegrationRuntimeDebugResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedIntegrationRuntimeDebugResource.withName(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedIntegrationRuntimeDebugResource.properties = IntegrationRuntime.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeDebugResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEdition.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEdition.java index c10ce274f19b..e5911aa50842 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEdition.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEdition.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public IntegrationRuntimeEdition() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeEdition. 
*/ - @JsonCreator public static IntegrationRuntimeEdition fromString(String name) { return fromString(name, IntegrationRuntimeEdition.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEntityReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEntityReferenceType.java index c524d334e896..a63b4c7f7918 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEntityReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeEntityReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -40,7 +39,6 @@ public IntegrationRuntimeEntityReferenceType() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeEntityReferenceType. 
*/ - @JsonCreator public static IntegrationRuntimeEntityReferenceType fromString(String name) { return fromString(name, IntegrationRuntimeEntityReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeInternalChannelEncryptionMode.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeInternalChannelEncryptionMode.java index c42adc17d10e..c397ed986309 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeInternalChannelEncryptionMode.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeInternalChannelEncryptionMode.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -44,7 +43,6 @@ public IntegrationRuntimeInternalChannelEncryptionMode() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeInternalChannelEncryptionMode. 
*/ - @JsonCreator public static IntegrationRuntimeInternalChannelEncryptionMode fromString(String name) { return fromString(name, IntegrationRuntimeInternalChannelEncryptionMode.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeLicenseType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeLicenseType.java index ce26fa1b0a79..c7254217a860 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeLicenseType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeLicenseType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public IntegrationRuntimeLicenseType() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeLicenseType. 
*/ - @JsonCreator public static IntegrationRuntimeLicenseType fromString(String name) { return fromString(name, IntegrationRuntimeLicenseType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java index f542f36107e3..9b0c82d3b430 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of integration runtime resources. */ @Fluent -public final class IntegrationRuntimeListResponse { +public final class IntegrationRuntimeListResponse implements JsonSerializable<IntegrationRuntimeListResponse> { /* * List of integration runtimes. */ - @JsonProperty(value = "value", required = true) private List<IntegrationRuntimeResourceInner> value; /* * The link to the next page of results, if any remaining results exist.
*/ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IntegrationRuntimeListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeListResponse if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IntegrationRuntimeListResponse. 
+ */ + public static IntegrationRuntimeListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeListResponse deserializedIntegrationRuntimeListResponse + = new IntegrationRuntimeListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> IntegrationRuntimeResourceInner.fromJson(reader1)); + deserializedIntegrationRuntimeListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedIntegrationRuntimeListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeNodeMonitoringData.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeNodeMonitoringData.java index 26a10d13a147..aebc0e46b28f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeNodeMonitoringData.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeNodeMonitoringData.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import 
java.util.LinkedHashMap; import java.util.Map; @@ -16,59 +17,51 @@ * Monitoring data for integration runtime node. */ @Fluent -public final class IntegrationRuntimeNodeMonitoringData { +public final class IntegrationRuntimeNodeMonitoringData + implements JsonSerializable { /* * Name of the integration runtime node. */ - @JsonProperty(value = "nodeName", access = JsonProperty.Access.WRITE_ONLY) private String nodeName; /* * Available memory (MB) on the integration runtime node. */ - @JsonProperty(value = "availableMemoryInMB", access = JsonProperty.Access.WRITE_ONLY) private Integer availableMemoryInMB; /* * CPU percentage on the integration runtime node. */ - @JsonProperty(value = "cpuUtilization", access = JsonProperty.Access.WRITE_ONLY) private Integer cpuUtilization; /* * Maximum concurrent jobs on the integration runtime node. */ - @JsonProperty(value = "concurrentJobsLimit", access = JsonProperty.Access.WRITE_ONLY) private Integer concurrentJobsLimit; /* * The number of jobs currently running on the integration runtime node. */ - @JsonProperty(value = "concurrentJobsRunning", access = JsonProperty.Access.WRITE_ONLY) private Integer concurrentJobsRunning; /* * The maximum concurrent jobs in this integration runtime. */ - @JsonProperty(value = "maxConcurrentJobs", access = JsonProperty.Access.WRITE_ONLY) private Integer maxConcurrentJobs; /* * Sent bytes on the integration runtime node. */ - @JsonProperty(value = "sentBytes", access = JsonProperty.Access.WRITE_ONLY) private Float sentBytes; /* * Received bytes on the integration runtime node. */ - @JsonProperty(value = "receivedBytes", access = JsonProperty.Access.WRITE_ONLY) private Float receivedBytes; /* * Monitoring data for integration runtime node. */ - @JsonIgnore private Map additionalProperties; /** @@ -154,7 +147,6 @@ public Float receivedBytes() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -170,14 +162,6 @@ public IntegrationRuntimeNodeMonitoringData withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -185,4 +169,72 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeNodeMonitoringData from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeNodeMonitoringData if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeNodeMonitoringData. 
+ */ + public static IntegrationRuntimeNodeMonitoringData fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeNodeMonitoringData deserializedIntegrationRuntimeNodeMonitoringData + = new IntegrationRuntimeNodeMonitoringData(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("nodeName".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.nodeName = reader.getString(); + } else if ("availableMemoryInMB".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.availableMemoryInMB + = reader.getNullable(JsonReader::getInt); + } else if ("cpuUtilization".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.cpuUtilization + = reader.getNullable(JsonReader::getInt); + } else if ("concurrentJobsLimit".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.concurrentJobsLimit + = reader.getNullable(JsonReader::getInt); + } else if ("concurrentJobsRunning".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.concurrentJobsRunning + = reader.getNullable(JsonReader::getInt); + } else if ("maxConcurrentJobs".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.maxConcurrentJobs + = reader.getNullable(JsonReader::getInt); + } else if ("sentBytes".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.sentBytes + = reader.getNullable(JsonReader::getFloat); + } else if ("receivedBytes".equals(fieldName)) { + deserializedIntegrationRuntimeNodeMonitoringData.receivedBytes + = reader.getNullable(JsonReader::getFloat); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeNodeMonitoringData.additionalProperties = additionalProperties; + + 
return deserializedIntegrationRuntimeNodeMonitoringData; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.java index 815cca0f8fff..10c661f28e24 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.java @@ -5,24 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Azure-SSIS integration runtime outbound network dependency endpoints for one category. */ @Fluent -public final class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint { +public final class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint + implements JsonSerializable { /* * The category of outbound network dependency. */ - @JsonProperty(value = "category") private String category; /* * The endpoints for outbound network dependency. 
*/ - @JsonProperty(value = "endpoints") private List endpoints; /** @@ -82,4 +85,49 @@ public void validate() { endpoints().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("category", this.category); + jsonWriter.writeArrayField("endpoints", this.endpoints, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint if the JsonReader was + * pointing to an instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the + * IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint. + */ + public static IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint deserializedIntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint + = new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("category".equals(fieldName)) { + deserializedIntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.category + = reader.getString(); + } else if ("endpoints".equals(fieldName)) { + List endpoints = reader + .readArray(reader1 -> IntegrationRuntimeOutboundNetworkDependenciesEndpoint.fromJson(reader1)); + deserializedIntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.endpoints = endpoints; + } else { + reader.skipChildren(); + } + } + + return 
deserializedIntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpoint.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpoint.java index 2023a46f6f95..1c75a0ef1243 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpoint.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpoint.java @@ -5,24 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * The endpoint for Azure-SSIS integration runtime outbound network dependency. */ @Fluent -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpoint { +public final class IntegrationRuntimeOutboundNetworkDependenciesEndpoint + implements JsonSerializable { /* * The domain name of endpoint. */ - @JsonProperty(value = "domainName") private String domainName; /* * The details of endpoint. 
*/ - @JsonProperty(value = "endpointDetails") private List endpointDetails; /** @@ -82,4 +85,49 @@ public void validate() { endpointDetails().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("domainName", this.domainName); + jsonWriter.writeArrayField("endpointDetails", this.endpointDetails, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeOutboundNetworkDependenciesEndpoint from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeOutboundNetworkDependenciesEndpoint if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeOutboundNetworkDependenciesEndpoint. 
+ */ + public static IntegrationRuntimeOutboundNetworkDependenciesEndpoint fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeOutboundNetworkDependenciesEndpoint deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpoint + = new IntegrationRuntimeOutboundNetworkDependenciesEndpoint(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("domainName".equals(fieldName)) { + deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpoint.domainName = reader.getString(); + } else if ("endpointDetails".equals(fieldName)) { + List endpointDetails + = reader.readArray( + reader1 -> IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.fromJson(reader1)); + deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpoint.endpointDetails = endpointDetails; + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpoint; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.java index 39c27ba42658..475f83ddfe27 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The details of Azure-SSIS integration runtime outbound network dependency endpoint. */ @Fluent -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails { +public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails + implements JsonSerializable { /* * The port of endpoint. */ - @JsonProperty(value = "port") private Integer port; /** @@ -51,4 +55,44 @@ public IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails withPort(Int */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("port", this.port); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails if the JsonReader was + * pointing to an instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the + * IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails. 
+ */ + public static IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails fromJson(JsonReader jsonReader) + throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointDetails + = new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("port".equals(fieldName)) { + deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.port + = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeOutboundNetworkDependenciesEndpointDetails; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java index ca43e8c982b3..f848809bce07 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeReference.java @@ -6,32 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; /** * Integration runtime reference type. 
*/ @Fluent -public final class IntegrationRuntimeReference { +public final class IntegrationRuntimeReference implements JsonSerializable { /* * Type of integration runtime. */ - @JsonProperty(value = "type", required = true) private String type = "IntegrationRuntimeReference"; /* * Reference integration runtime name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Arguments for integration runtime. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /** @@ -114,4 +113,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(IntegrationRuntimeReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("referenceName", this.referenceName); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeReference if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the IntegrationRuntimeReference. 
+ */ + public static IntegrationRuntimeReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeReference deserializedIntegrationRuntimeReference = new IntegrationRuntimeReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceName".equals(fieldName)) { + deserializedIntegrationRuntimeReference.referenceName = reader.getString(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedIntegrationRuntimeReference.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeRegenerateKeyParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeRegenerateKeyParameters.java index b928418bcb62..922727ce9cad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeRegenerateKeyParameters.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeRegenerateKeyParameters.java @@ -5,17 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Parameters to regenerate the authentication key. 
*/ @Fluent -public final class IntegrationRuntimeRegenerateKeyParameters { +public final class IntegrationRuntimeRegenerateKeyParameters + implements JsonSerializable { /* * The name of the authentication key to regenerate. */ - @JsonProperty(value = "keyName") private IntegrationRuntimeAuthKeyName keyName; /** @@ -51,4 +55,42 @@ public IntegrationRuntimeRegenerateKeyParameters withKeyName(IntegrationRuntimeA */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("keyName", this.keyName == null ? null : this.keyName.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeRegenerateKeyParameters from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeRegenerateKeyParameters if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeRegenerateKeyParameters. 
+ */ + public static IntegrationRuntimeRegenerateKeyParameters fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeRegenerateKeyParameters deserializedIntegrationRuntimeRegenerateKeyParameters + = new IntegrationRuntimeRegenerateKeyParameters(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("keyName".equals(fieldName)) { + deserializedIntegrationRuntimeRegenerateKeyParameters.keyName + = IntegrationRuntimeAuthKeyName.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedIntegrationRuntimeRegenerateKeyParameters; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java index be120129d825..006c0f648ce7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogInfo.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,42 +17,36 @@ * Catalog information for managed 
dedicated integration runtime. */ @Fluent -public final class IntegrationRuntimeSsisCatalogInfo { +public final class IntegrationRuntimeSsisCatalogInfo implements JsonSerializable { /* * The catalog database server URL. */ - @JsonProperty(value = "catalogServerEndpoint") private String catalogServerEndpoint; /* * The administrator user name of catalog database. */ - @JsonProperty(value = "catalogAdminUserName") private String catalogAdminUsername; /* * The password of the administrator user account of the catalog database. */ - @JsonProperty(value = "catalogAdminPassword") private SecureString catalogAdminPassword; /* * The pricing tier for the catalog database. The valid values could be found in * https://azure.microsoft.com/en-us/pricing/details/sql-database/ */ - @JsonProperty(value = "catalogPricingTier") private IntegrationRuntimeSsisCatalogPricingTier catalogPricingTier; /* * The dual standby pair name of Azure-SSIS Integration Runtimes to support SSISDB failover. */ - @JsonProperty(value = "dualStandbyPairName") private String dualStandbyPairName; /* * Catalog information for managed dedicated integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -170,7 +165,6 @@ public IntegrationRuntimeSsisCatalogInfo withDualStandbyPairName(String dualStan * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -186,14 +180,6 @@ public IntegrationRuntimeSsisCatalogInfo withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -204,4 +190,66 @@ public void validate() { catalogAdminPassword().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("catalogServerEndpoint", this.catalogServerEndpoint); + jsonWriter.writeStringField("catalogAdminUserName", this.catalogAdminUsername); + jsonWriter.writeJsonField("catalogAdminPassword", this.catalogAdminPassword); + jsonWriter.writeStringField("catalogPricingTier", + this.catalogPricingTier == null ? null : this.catalogPricingTier.toString()); + jsonWriter.writeStringField("dualStandbyPairName", this.dualStandbyPairName); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeSsisCatalogInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeSsisCatalogInfo if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeSsisCatalogInfo. 
+ */ + public static IntegrationRuntimeSsisCatalogInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeSsisCatalogInfo deserializedIntegrationRuntimeSsisCatalogInfo + = new IntegrationRuntimeSsisCatalogInfo(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("catalogServerEndpoint".equals(fieldName)) { + deserializedIntegrationRuntimeSsisCatalogInfo.catalogServerEndpoint = reader.getString(); + } else if ("catalogAdminUserName".equals(fieldName)) { + deserializedIntegrationRuntimeSsisCatalogInfo.catalogAdminUsername = reader.getString(); + } else if ("catalogAdminPassword".equals(fieldName)) { + deserializedIntegrationRuntimeSsisCatalogInfo.catalogAdminPassword = SecureString.fromJson(reader); + } else if ("catalogPricingTier".equals(fieldName)) { + deserializedIntegrationRuntimeSsisCatalogInfo.catalogPricingTier + = IntegrationRuntimeSsisCatalogPricingTier.fromString(reader.getString()); + } else if ("dualStandbyPairName".equals(fieldName)) { + deserializedIntegrationRuntimeSsisCatalogInfo.dualStandbyPairName = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeSsisCatalogInfo.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeSsisCatalogInfo; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogPricingTier.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogPricingTier.java index fd6e659a3418..13d698990ccb 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogPricingTier.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisCatalogPricingTier.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -49,7 +48,6 @@ public IntegrationRuntimeSsisCatalogPricingTier() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeSsisCatalogPricingTier. */ - @JsonCreator public static IntegrationRuntimeSsisCatalogPricingTier fromString(String name) { return fromString(name, IntegrationRuntimeSsisCatalogPricingTier.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisProperties.java index 747f169d27b0..9a9923e9d5cc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeSsisProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,59 +18,50 @@ * SSIS properties for managed integration runtime. */ @Fluent -public final class IntegrationRuntimeSsisProperties { +public final class IntegrationRuntimeSsisProperties implements JsonSerializable { /* * Catalog information for managed dedicated integration runtime. */ - @JsonProperty(value = "catalogInfo") private IntegrationRuntimeSsisCatalogInfo catalogInfo; /* * License type for bringing your own license scenario. */ - @JsonProperty(value = "licenseType") private IntegrationRuntimeLicenseType licenseType; /* * Custom setup script properties for a managed dedicated integration runtime. */ - @JsonProperty(value = "customSetupScriptProperties") private IntegrationRuntimeCustomSetupScriptProperties customSetupScriptProperties; /* * Data proxy properties for a managed dedicated integration runtime. */ - @JsonProperty(value = "dataProxyProperties") private IntegrationRuntimeDataProxyProperties dataProxyProperties; /* * The edition for the SSIS Integration Runtime */ - @JsonProperty(value = "edition") private IntegrationRuntimeEdition edition; /* * Custom setup without script properties for a SSIS integration runtime. */ - @JsonProperty(value = "expressCustomSetupProperties") private List expressCustomSetupProperties; /* * Package stores for the SSIS Integration Runtime. */ - @JsonProperty(value = "packageStores") private List packageStores; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /* * SSIS properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -250,7 +242,6 @@ public IntegrationRuntimeSsisProperties withCredential(CredentialReference crede * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -266,14 +257,6 @@ public IntegrationRuntimeSsisProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -299,4 +282,83 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("catalogInfo", this.catalogInfo); + jsonWriter.writeStringField("licenseType", this.licenseType == null ? null : this.licenseType.toString()); + jsonWriter.writeJsonField("customSetupScriptProperties", this.customSetupScriptProperties); + jsonWriter.writeJsonField("dataProxyProperties", this.dataProxyProperties); + jsonWriter.writeStringField("edition", this.edition == null ? null : this.edition.toString()); + jsonWriter.writeArrayField("expressCustomSetupProperties", this.expressCustomSetupProperties, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("packageStores", this.packageStores, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("credential", this.credential); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeSsisProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeSsisProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeSsisProperties. 
+ */ + public static IntegrationRuntimeSsisProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeSsisProperties deserializedIntegrationRuntimeSsisProperties + = new IntegrationRuntimeSsisProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("catalogInfo".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.catalogInfo + = IntegrationRuntimeSsisCatalogInfo.fromJson(reader); + } else if ("licenseType".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.licenseType + = IntegrationRuntimeLicenseType.fromString(reader.getString()); + } else if ("customSetupScriptProperties".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.customSetupScriptProperties + = IntegrationRuntimeCustomSetupScriptProperties.fromJson(reader); + } else if ("dataProxyProperties".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.dataProxyProperties + = IntegrationRuntimeDataProxyProperties.fromJson(reader); + } else if ("edition".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.edition + = IntegrationRuntimeEdition.fromString(reader.getString()); + } else if ("expressCustomSetupProperties".equals(fieldName)) { + List expressCustomSetupProperties + = reader.readArray(reader1 -> CustomSetupBase.fromJson(reader1)); + deserializedIntegrationRuntimeSsisProperties.expressCustomSetupProperties + = expressCustomSetupProperties; + } else if ("packageStores".equals(fieldName)) { + List packageStores = reader.readArray(reader1 -> PackageStore.fromJson(reader1)); + deserializedIntegrationRuntimeSsisProperties.packageStores = packageStores; + } else if ("credential".equals(fieldName)) { + deserializedIntegrationRuntimeSsisProperties.credential = CredentialReference.fromJson(reader); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeSsisProperties.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeSsisProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeState.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeState.java index b91db266cbe1..56423ae55d4c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeState.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeState.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -77,7 +76,6 @@ public IntegrationRuntimeState() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeState. 
*/ - @JsonCreator public static IntegrationRuntimeState fromString(String name) { return fromString(name, IntegrationRuntimeState.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java index ee31479f63be..2486f555f34f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeStatus.java @@ -5,54 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Integration runtime status. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = IntegrationRuntimeStatus.class, - visible = true) -@JsonTypeName("IntegrationRuntimeStatus") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Managed", value = ManagedIntegrationRuntimeStatus.class), - @JsonSubTypes.Type(name = "SelfHosted", value = SelfHostedIntegrationRuntimeStatus.class) }) @Fluent -public class IntegrationRuntimeStatus { +public class IntegrationRuntimeStatus implements JsonSerializable { /* * Type of integration runtime. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.fromString("IntegrationRuntimeStatus"); /* * The data factory name which the integration runtime belong to. */ - @JsonProperty(value = "dataFactoryName", access = JsonProperty.Access.WRITE_ONLY) private String dataFactoryName; /* * The state of integration runtime. */ - @JsonProperty(value = "state", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeState state; /* * Integration runtime status. */ - @JsonIgnore private Map additionalProperties; /** @@ -79,6 +62,17 @@ public String dataFactoryName() { return this.dataFactoryName; } + /** + * Set the dataFactoryName property: The data factory name which the integration runtime belong to. + * + * @param dataFactoryName the dataFactoryName value to set. + * @return the IntegrationRuntimeStatus object itself. + */ + IntegrationRuntimeStatus withDataFactoryName(String dataFactoryName) { + this.dataFactoryName = dataFactoryName; + return this; + } + /** * Get the state property: The state of integration runtime. * @@ -88,12 +82,22 @@ public IntegrationRuntimeState state() { return this.state; } + /** + * Set the state property: The state of integration runtime. + * + * @param state the state value to set. + * @return the IntegrationRuntimeStatus object itself. 
+ */ + IntegrationRuntimeStatus withState(IntegrationRuntimeState state) { + this.state = state; + return this; + } + /** * Get the additionalProperties property: Integration runtime status. * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -109,14 +113,6 @@ public IntegrationRuntimeStatus withAdditionalProperties(Map add return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -124,4 +120,82 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeStatus from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeStatus if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeStatus. 
+ */ + public static IntegrationRuntimeStatus fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("Managed".equals(discriminatorValue)) { + return ManagedIntegrationRuntimeStatus.fromJson(readerToUse.reset()); + } else if ("SelfHosted".equals(discriminatorValue)) { + return SelfHostedIntegrationRuntimeStatus.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static IntegrationRuntimeStatus fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeStatus deserializedIntegrationRuntimeStatus = new IntegrationRuntimeStatus(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedIntegrationRuntimeStatus.type = IntegrationRuntimeType.fromString(reader.getString()); + } else if ("dataFactoryName".equals(fieldName)) { + deserializedIntegrationRuntimeStatus.dataFactoryName = reader.getString(); + } else if ("state".equals(fieldName)) { + deserializedIntegrationRuntimeStatus.state = IntegrationRuntimeState.fromString(reader.getString()); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedIntegrationRuntimeStatus.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeStatus; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeType.java index 12a31a27b004..abdc7d8eb2c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public IntegrationRuntimeType() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeType. 
*/ - @JsonCreator public static IntegrationRuntimeType fromString(String name) { return fromString(name, IntegrationRuntimeType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeUpdateResult.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeUpdateResult.java index 542762f1daec..43c622731199 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeUpdateResult.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeUpdateResult.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public IntegrationRuntimeUpdateResult() { * @param name a name to look for. * @return the corresponding IntegrationRuntimeUpdateResult. 
*/ - @JsonCreator public static IntegrationRuntimeUpdateResult fromString(String name) { return fromString(name, IntegrationRuntimeUpdateResult.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeVNetProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeVNetProperties.java index 9b9a76c6f3c3..54befe2d026a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeVNetProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/IntegrationRuntimeVNetProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,35 +18,30 @@ * VNet properties for managed integration runtime. */ @Fluent -public final class IntegrationRuntimeVNetProperties { +public final class IntegrationRuntimeVNetProperties implements JsonSerializable { /* * The ID of the VNet that this integration runtime will join. */ - @JsonProperty(value = "vNetId") private String vNetId; /* * The name of the subnet this integration runtime will join. */ - @JsonProperty(value = "subnet") private String subnet; /* * Resource IDs of the public IP addresses that this integration runtime will use. 
*/ - @JsonProperty(value = "publicIPs") private List publicIPs; /* * The ID of subnet, to which this Azure-SSIS integration runtime will be joined. */ - @JsonProperty(value = "subnetId") private String subnetId; /* * VNet properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -139,7 +135,6 @@ public IntegrationRuntimeVNetProperties withSubnetId(String subnetId) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -155,14 +150,6 @@ public IntegrationRuntimeVNetProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -170,4 +157,62 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("vNetId", this.vNetId); + jsonWriter.writeStringField("subnet", this.subnet); + jsonWriter.writeArrayField("publicIPs", this.publicIPs, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("subnetId", this.subnetId); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of IntegrationRuntimeVNetProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of IntegrationRuntimeVNetProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the IntegrationRuntimeVNetProperties. 
+ */ + public static IntegrationRuntimeVNetProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + IntegrationRuntimeVNetProperties deserializedIntegrationRuntimeVNetProperties + = new IntegrationRuntimeVNetProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("vNetId".equals(fieldName)) { + deserializedIntegrationRuntimeVNetProperties.vNetId = reader.getString(); + } else if ("subnet".equals(fieldName)) { + deserializedIntegrationRuntimeVNetProperties.subnet = reader.getString(); + } else if ("publicIPs".equals(fieldName)) { + List publicIPs = reader.readArray(reader1 -> reader1.getString()); + deserializedIntegrationRuntimeVNetProperties.publicIPs = publicIPs; + } else if ("subnetId".equals(fieldName)) { + deserializedIntegrationRuntimeVNetProperties.subnetId = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedIntegrationRuntimeVNetProperties.additionalProperties = additionalProperties; + + return deserializedIntegrationRuntimeVNetProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java index 76d822b7046d..de4fe4495cf0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.JiraLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Jira Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraLinkedService.class, visible = true) -@JsonTypeName("Jira") @Fluent public final class JiraLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Jira"; /* * Jira Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private JiraLinkedServiceTypeProperties innerTypeProperties = new JiraLinkedServiceTypeProperties(); /** @@ -307,4 +303,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(JiraLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { 
+ jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JiraLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JiraLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the JiraLinkedService. + */ + public static JiraLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JiraLinkedService deserializedJiraLinkedService = new JiraLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedJiraLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedJiraLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedJiraLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedJiraLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedJiraLinkedService.innerTypeProperties + = JiraLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedJiraLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedJiraLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedJiraLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java index 7e8a897de5b7..7c1ff0919258 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Jira Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraObjectDataset.class, visible = true) -@JsonTypeName("JiraObject") @Fluent public final class JiraObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JiraObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JiraObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JiraObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the JiraObjectDataset. 
+ */ + public static JiraObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JiraObjectDataset deserializedJiraObjectDataset = new JiraObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedJiraObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedJiraObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedJiraObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedJiraObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedJiraObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedJiraObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedJiraObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedJiraObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedJiraObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJiraObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedJiraObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java index 528baedaaf3c..7d2fbcb6fce1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JiraSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Jira Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JiraSource.class, visible = true) -@JsonTypeName("JiraSource") @Fluent public final class JiraSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JiraSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public JiraSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JiraSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JiraSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the JiraSource. 
+ */ + public static JiraSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JiraSource deserializedJiraSource = new JiraSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedJiraSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedJiraSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedJiraSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedJiraSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedJiraSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedJiraSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedJiraSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedJiraSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJiraSource.withAdditionalProperties(additionalProperties); + + return deserializedJiraSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java index ddd1d51b5823..175df8b75bfe 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.JsonDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Json dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonDataset.class, visible = true) -@JsonTypeName("Json") @Fluent public final class JsonDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Json"; /* * Json dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private JsonDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the JsonDataset. 
+ */ + public static JsonDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonDataset deserializedJsonDataset = new JsonDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedJsonDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedJsonDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedJsonDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedJsonDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedJsonDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedJsonDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedJsonDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedJsonDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedJsonDataset.innerTypeProperties = JsonDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonDataset.withAdditionalProperties(additionalProperties); + + return deserializedJsonDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java index f2f39c6f63f8..c45705f27dd2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonFormat.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The data stored in JSON format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonFormat.class, visible = true) -@JsonTypeName("JsonFormat") @Fluent public final class JsonFormat extends DatasetStorageFormat { /* * Type of dataset storage format. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JsonFormat"; /* * File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value * is 'setOfObjects'. It is case-sensitive. */ - @JsonProperty(value = "filePattern") private Object filePattern; /* * The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "nestingSeparator") private Object nestingSeparator; /* @@ -44,14 +40,12 @@ public final class JsonFormat extends DatasetStorageFormat { * column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. 
* Type: string (or Expression with resultType string). */ - @JsonProperty(value = "encodingName") private Object encodingName; /* * The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "jsonNodeReference") private Object jsonNodeReference; /* @@ -60,7 +54,6 @@ public final class JsonFormat extends DatasetStorageFormat { * from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or * Expression with resultType object). */ - @JsonProperty(value = "jsonPathDefinition") private Object jsonPathDefinition; /** @@ -224,4 +217,72 @@ public JsonFormat withDeserializer(Object deserializer) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("serializer", serializer()); + jsonWriter.writeUntypedField("deserializer", deserializer()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("filePattern", this.filePattern); + jsonWriter.writeUntypedField("nestingSeparator", this.nestingSeparator); + jsonWriter.writeUntypedField("encodingName", this.encodingName); + jsonWriter.writeUntypedField("jsonNodeReference", this.jsonNodeReference); + jsonWriter.writeUntypedField("jsonPathDefinition", this.jsonPathDefinition); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonFormat if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. 
+ * @throws IOException If an error occurs while reading the JsonFormat. + */ + public static JsonFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonFormat deserializedJsonFormat = new JsonFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serializer".equals(fieldName)) { + deserializedJsonFormat.withSerializer(reader.readUntyped()); + } else if ("deserializer".equals(fieldName)) { + deserializedJsonFormat.withDeserializer(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedJsonFormat.type = reader.getString(); + } else if ("filePattern".equals(fieldName)) { + deserializedJsonFormat.filePattern = reader.readUntyped(); + } else if ("nestingSeparator".equals(fieldName)) { + deserializedJsonFormat.nestingSeparator = reader.readUntyped(); + } else if ("encodingName".equals(fieldName)) { + deserializedJsonFormat.encodingName = reader.readUntyped(); + } else if ("jsonNodeReference".equals(fieldName)) { + deserializedJsonFormat.jsonNodeReference = reader.readUntyped(); + } else if ("jsonPathDefinition".equals(fieldName)) { + deserializedJsonFormat.jsonPathDefinition = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonFormat.withAdditionalProperties(additionalProperties); + + return deserializedJsonFormat; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java index 9018c0c7a33b..b8c42ca582d9 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Json read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonReadSettings.class, visible = true) -@JsonTypeName("JsonReadSettings") @Fluent public final class JsonReadSettings extends FormatReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JsonReadSettings"; /* * Compression settings. 
*/ - @JsonProperty(value = "compressionProperties") private CompressionReadSettings compressionProperties; /** @@ -78,4 +75,54 @@ public void validate() { compressionProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("compressionProperties", this.compressionProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the JsonReadSettings. 
+ */ + public static JsonReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonReadSettings deserializedJsonReadSettings = new JsonReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedJsonReadSettings.type = reader.getString(); + } else if ("compressionProperties".equals(fieldName)) { + deserializedJsonReadSettings.compressionProperties = CompressionReadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedJsonReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java index dc350a33e316..eac691067ead 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** 
* A copy activity Json sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonSink.class, visible = true) -@JsonTypeName("JsonSink") @Fluent public final class JsonSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JsonSink"; /* * Json store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /* * Json format settings. */ - @JsonProperty(value = "formatSettings") private JsonWriteSettings formatSettings; /** @@ -161,4 +157,75 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. 
+ * @throws IOException If an error occurs while reading the JsonSink. + */ + public static JsonSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonSink deserializedJsonSink = new JsonSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedJsonSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedJsonSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedJsonSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedJsonSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedJsonSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedJsonSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedJsonSink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedJsonSink.storeSettings = StoreWriteSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedJsonSink.formatSettings = JsonWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonSink.withAdditionalProperties(additionalProperties); + + return deserializedJsonSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java index 80706e6d9392..d3422261de45 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Json source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonSource.class, visible = true) -@JsonTypeName("JsonSource") @Fluent public final class JsonSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JsonSource"; /* * Json store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Json format settings. */ - @JsonProperty(value = "formatSettings") private JsonReadSettings formatSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -172,4 +167,72 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the JsonSource. 
+ */ + public static JsonSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonSource deserializedJsonSource = new JsonSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedJsonSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedJsonSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedJsonSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedJsonSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedJsonSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedJsonSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedJsonSource.formatSettings = JsonReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedJsonSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonSource.withAdditionalProperties(additionalProperties); + + return deserializedJsonSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java index e0899f29de7a..8e1aaad013ee 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/JsonWriteSettings.java @@ -5,30 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Json write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = JsonWriteSettings.class, visible = true) -@JsonTypeName("JsonWriteSettings") @Fluent public final class JsonWriteSettings extends FormatWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "JsonWriteSettings"; /* * File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default * value is 'setOfObjects'. It is case-sensitive. 
*/ - @JsonProperty(value = "filePattern") private Object filePattern; /** @@ -78,4 +75,54 @@ public JsonWriteSettings withFilePattern(Object filePattern) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("filePattern", this.filePattern); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of JsonWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of JsonWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the JsonWriteSettings. 
+ */ + public static JsonWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + JsonWriteSettings deserializedJsonWriteSettings = new JsonWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedJsonWriteSettings.type = reader.getString(); + } else if ("filePattern".equals(fieldName)) { + deserializedJsonWriteSettings.filePattern = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedJsonWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedJsonWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java index 71f096045631..82eda9c16fac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Fabric LakeHouse linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseLinkedService.class, visible = true) -@JsonTypeName("LakeHouse") @Fluent public final class LakeHouseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouse"; /* * Microsoft Fabric LakeHouse linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private LakeHouseLinkedServiceTypeProperties innerTypeProperties = new LakeHouseLinkedServiceTypeProperties(); /** @@ -319,4 +315,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LakeHouseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of LakeHouseLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LakeHouseLinkedService. + */ + public static LakeHouseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseLinkedService deserializedLakeHouseLinkedService = new LakeHouseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedLakeHouseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedLakeHouseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedLakeHouseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedLakeHouseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedLakeHouseLinkedService.innerTypeProperties + = LakeHouseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java index 945cd1db6377..3ef5258d793c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseLocation.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of Microsoft Fabric LakeHouse Files dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseLocation.class, visible = true) -@JsonTypeName("LakeHouseLocation") @Fluent public final class LakeHouseLocation extends DatasetLocation { /* * Type of dataset storage location. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseLocation"; /** @@ -67,4 +65,57 @@ public LakeHouseLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseLocation. 
+ */ + public static LakeHouseLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseLocation deserializedLakeHouseLocation = new LakeHouseLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedLakeHouseLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedLakeHouseLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseLocation.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java index 8a517f801c2c..abb338258e4b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseReadSettings.java @@ -5,80 +5,69 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Microsoft Fabric LakeHouse Files read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseReadSettings.class, visible = true) -@JsonTypeName("LakeHouseReadSettings") @Fluent public final class LakeHouseReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). 
*/ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -322,4 +311,84 @@ public LakeHouseReadSettings withDisableMetricsCollection(Object disableMetricsC public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of 
LakeHouseReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseReadSettings. + */ + public static LakeHouseReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseReadSettings deserializedLakeHouseReadSettings = new LakeHouseReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedLakeHouseReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedLakeHouseReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedLakeHouseReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedLakeHouseReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedLakeHouseReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedLakeHouseReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedLakeHouseReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedLakeHouseReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + 
deserializedLakeHouseReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedLakeHouseReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedLakeHouseReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java index cdc30a4237e8..4f27925ca5df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Fabric LakeHouse Table. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableDataset.class, visible = true) -@JsonTypeName("LakeHouseTable") @Fluent public final class LakeHouseTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseTable"; /* * Microsoft Fabric LakeHouse Table dataset properties. */ - @JsonProperty(value = "typeProperties") private LakeHouseTableDatasetTypeProperties innerTypeProperties; /** @@ -183,4 +179,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LakeHouseTableDataset. + */ + public static LakeHouseTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseTableDataset deserializedLakeHouseTableDataset = new LakeHouseTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedLakeHouseTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedLakeHouseTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedLakeHouseTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedLakeHouseTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedLakeHouseTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedLakeHouseTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedLakeHouseTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedLakeHouseTableDataset.innerTypeProperties + = LakeHouseTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedLakeHouseTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java index 81f943495886..4668c41f53a8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSink.java @@ -5,43 +5,38 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity for Microsoft Fabric LakeHouse Table sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableSink.class, visible = true) -@JsonTypeName("LakeHouseTableSink") @Fluent public final class LakeHouseTableSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseTableSink"; /* * The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". */ - @JsonProperty(value = "tableActionOption") private Object tableActionOption; /* * Create partitions in folder structure based on one or multiple columns. 
Each distinct column value (pair) will be * a new partition. Possible values include: "None", "PartitionByKey". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType array * of objects). */ - @JsonProperty(value = "partitionNameList") private Object partitionNameList; /** @@ -189,4 +184,78 @@ public LakeHouseTableSink withDisableMetricsCollection(Object disableMetricsColl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("tableActionOption", this.tableActionOption); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeUntypedField("partitionNameList", this.partitionNameList); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseTableSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseTableSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the LakeHouseTableSink. + */ + public static LakeHouseTableSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseTableSink deserializedLakeHouseTableSink = new LakeHouseTableSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedLakeHouseTableSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedLakeHouseTableSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedLakeHouseTableSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedLakeHouseTableSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedLakeHouseTableSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedLakeHouseTableSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseTableSink.type = reader.getString(); + } else if ("tableActionOption".equals(fieldName)) { + deserializedLakeHouseTableSink.tableActionOption = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedLakeHouseTableSink.partitionOption = reader.readUntyped(); + } else if ("partitionNameList".equals(fieldName)) { + deserializedLakeHouseTableSink.partitionNameList = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseTableSink.withAdditionalProperties(additionalProperties); + + return 
deserializedLakeHouseTableSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java index 50b7dfc03408..21dd10bfbb74 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseTableSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Microsoft Fabric LakeHouse Table. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseTableSource.class, visible = true) -@JsonTypeName("LakeHouseTableSource") @Fluent public final class LakeHouseTableSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseTableSource"; /* * Query an older snapshot by timestamp. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "timestampAsOf") private Object timestampAsOf; /* * Query an older snapshot by version. Type: integer (or Expression with resultType integer). 
*/ - @JsonProperty(value = "versionAsOf") private Object versionAsOf; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -170,4 +165,72 @@ public LakeHouseTableSource withDisableMetricsCollection(Object disableMetricsCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("timestampAsOf", this.timestampAsOf); + jsonWriter.writeUntypedField("versionAsOf", this.versionAsOf); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseTableSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseTableSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseTableSource. 
+ */ + public static LakeHouseTableSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseTableSource deserializedLakeHouseTableSource = new LakeHouseTableSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedLakeHouseTableSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedLakeHouseTableSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedLakeHouseTableSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedLakeHouseTableSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseTableSource.type = reader.getString(); + } else if ("timestampAsOf".equals(fieldName)) { + deserializedLakeHouseTableSource.timestampAsOf = reader.readUntyped(); + } else if ("versionAsOf".equals(fieldName)) { + deserializedLakeHouseTableSource.versionAsOf = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedLakeHouseTableSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseTableSource.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseTableSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java index ac14793498e1..d9a7c4ca81f8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LakeHouseWriteSettings.java @@ -5,24 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Microsoft Fabric LakeHouse Files write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LakeHouseWriteSettings.class, visible = true) -@JsonTypeName("LakeHouseWriteSettings") @Fluent public final class LakeHouseWriteSettings extends StoreWriteSettings { /* * The write setting type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LakeHouseWriteSettings"; /** @@ -86,4 +84,64 @@ public LakeHouseWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LakeHouseWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LakeHouseWriteSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the LakeHouseWriteSettings. 
+ */ + public static LakeHouseWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LakeHouseWriteSettings deserializedLakeHouseWriteSettings = new LakeHouseWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedLakeHouseWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedLakeHouseWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedLakeHouseWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedLakeHouseWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedLakeHouseWriteSettings.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLakeHouseWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedLakeHouseWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntime.java index a4a0686e4d2d..861dfd958b75 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntime.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntime.java @@ -5,42 +5,42 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; /** * The linked integration runtime information. */ @Immutable -public final class LinkedIntegrationRuntime { +public final class LinkedIntegrationRuntime implements JsonSerializable { /* * The name of the linked integration runtime. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The subscription ID for which the linked integration runtime belong to. */ - @JsonProperty(value = "subscriptionId", access = JsonProperty.Access.WRITE_ONLY) private String subscriptionId; /* * The name of the data factory for which the linked integration runtime belong to. */ - @JsonProperty(value = "dataFactoryName", access = JsonProperty.Access.WRITE_ONLY) private String dataFactoryName; /* * The location of the data factory for which the linked integration runtime belong to. */ - @JsonProperty(value = "dataFactoryLocation", access = JsonProperty.Access.WRITE_ONLY) private String dataFactoryLocation; /* * The creating time of the linked integration runtime. 
*/ - @JsonProperty(value = "createTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime createTime; /** @@ -103,4 +103,48 @@ public OffsetDateTime createTime() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedIntegrationRuntime from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedIntegrationRuntime if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the LinkedIntegrationRuntime. + */ + public static LinkedIntegrationRuntime fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedIntegrationRuntime deserializedLinkedIntegrationRuntime = new LinkedIntegrationRuntime(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedLinkedIntegrationRuntime.name = reader.getString(); + } else if ("subscriptionId".equals(fieldName)) { + deserializedLinkedIntegrationRuntime.subscriptionId = reader.getString(); + } else if ("dataFactoryName".equals(fieldName)) { + deserializedLinkedIntegrationRuntime.dataFactoryName = reader.getString(); + } else if ("dataFactoryLocation".equals(fieldName)) { + deserializedLinkedIntegrationRuntime.dataFactoryLocation = reader.getString(); + } else if ("createTime".equals(fieldName)) { + deserializedLinkedIntegrationRuntime.createTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedIntegrationRuntime; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java index 5bd3d1a2b494..bfb5253ed94e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeKeyAuthorization.java @@ -6,33 +6,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The key authorization type integration runtime. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authorizationType", - defaultImpl = LinkedIntegrationRuntimeKeyAuthorization.class, - visible = true) -@JsonTypeName("Key") @Fluent public final class LinkedIntegrationRuntimeKeyAuthorization extends LinkedIntegrationRuntimeType { /* * The authorization type for integration runtime sharing. */ - @JsonTypeId - @JsonProperty(value = "authorizationType", required = true) private String authorizationType = "Key"; /* * The key used for authorization. 
*/ - @JsonProperty(value = "key", required = true) private SecureString key; /** @@ -89,4 +80,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedIntegrationRuntimeKeyAuthorization.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("key", this.key); + jsonWriter.writeStringField("authorizationType", this.authorizationType); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedIntegrationRuntimeKeyAuthorization from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedIntegrationRuntimeKeyAuthorization if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedIntegrationRuntimeKeyAuthorization. 
+ */ + public static LinkedIntegrationRuntimeKeyAuthorization fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedIntegrationRuntimeKeyAuthorization deserializedLinkedIntegrationRuntimeKeyAuthorization + = new LinkedIntegrationRuntimeKeyAuthorization(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("key".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeKeyAuthorization.key = SecureString.fromJson(reader); + } else if ("authorizationType".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeKeyAuthorization.authorizationType = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedIntegrationRuntimeKeyAuthorization; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java index 91844a66bfdf..a2ae750c7dc7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRbacAuthorization.java @@ -6,39 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The role based 
access control (RBAC) authorization type integration runtime. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authorizationType", - defaultImpl = LinkedIntegrationRuntimeRbacAuthorization.class, - visible = true) -@JsonTypeName("RBAC") @Fluent public final class LinkedIntegrationRuntimeRbacAuthorization extends LinkedIntegrationRuntimeType { /* * The authorization type for integration runtime sharing. */ - @JsonTypeId - @JsonProperty(value = "authorizationType", required = true) private String authorizationType = "RBAC"; /* * The resource identifier of the integration runtime to be shared. */ - @JsonProperty(value = "resourceId", required = true) private String resourceId; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -116,4 +106,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedIntegrationRuntimeRbacAuthorization.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("resourceId", this.resourceId); + jsonWriter.writeStringField("authorizationType", this.authorizationType); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedIntegrationRuntimeRbacAuthorization from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedIntegrationRuntimeRbacAuthorization if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedIntegrationRuntimeRbacAuthorization. 
+ */ + public static LinkedIntegrationRuntimeRbacAuthorization fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedIntegrationRuntimeRbacAuthorization deserializedLinkedIntegrationRuntimeRbacAuthorization + = new LinkedIntegrationRuntimeRbacAuthorization(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("resourceId".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeRbacAuthorization.resourceId = reader.getString(); + } else if ("authorizationType".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeRbacAuthorization.authorizationType = reader.getString(); + } else if ("credential".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeRbacAuthorization.credential + = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedIntegrationRuntimeRbacAuthorization; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java index a5c406c517e3..d3d355c37aa9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeRequest.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Data factory 
name for linked integration runtime request. */ @Fluent -public final class LinkedIntegrationRuntimeRequest { +public final class LinkedIntegrationRuntimeRequest implements JsonSerializable { /* * The data factory name for linked integration runtime. */ - @JsonProperty(value = "factoryName", required = true) private String linkedFactoryName; /** @@ -59,4 +62,42 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedIntegrationRuntimeRequest.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("factoryName", this.linkedFactoryName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedIntegrationRuntimeRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedIntegrationRuntimeRequest if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedIntegrationRuntimeRequest. 
+ */ + public static LinkedIntegrationRuntimeRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedIntegrationRuntimeRequest deserializedLinkedIntegrationRuntimeRequest + = new LinkedIntegrationRuntimeRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("factoryName".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeRequest.linkedFactoryName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedIntegrationRuntimeRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java index c33a3d46ef59..cab235f221f2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedIntegrationRuntimeType.java @@ -5,31 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The base definition of a linked integration runtime. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authorizationType", - defaultImpl = LinkedIntegrationRuntimeType.class, - visible = true) -@JsonTypeName("LinkedIntegrationRuntimeType") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Key", value = LinkedIntegrationRuntimeKeyAuthorization.class), - @JsonSubTypes.Type(name = "RBAC", value = LinkedIntegrationRuntimeRbacAuthorization.class) }) @Immutable -public class LinkedIntegrationRuntimeType { +public class LinkedIntegrationRuntimeType implements JsonSerializable { /* * The authorization type for integration runtime sharing. */ - @JsonTypeId - @JsonProperty(value = "authorizationType", required = true) private String authorizationType = "LinkedIntegrationRuntimeType"; /** @@ -54,4 +43,67 @@ public String authorizationType() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("authorizationType", this.authorizationType); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedIntegrationRuntimeType from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedIntegrationRuntimeType if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the LinkedIntegrationRuntimeType. 
+ */ + public static LinkedIntegrationRuntimeType fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("authorizationType".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("Key".equals(discriminatorValue)) { + return LinkedIntegrationRuntimeKeyAuthorization.fromJson(readerToUse.reset()); + } else if ("RBAC".equals(discriminatorValue)) { + return LinkedIntegrationRuntimeRbacAuthorization.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static LinkedIntegrationRuntimeType fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedIntegrationRuntimeType deserializedLinkedIntegrationRuntimeType = new LinkedIntegrationRuntimeType(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("authorizationType".equals(fieldName)) { + deserializedLinkedIntegrationRuntimeType.authorizationType = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedIntegrationRuntimeType; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java index 53d297c6caf9..3a9f401db97c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java @@ -5,15 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -22,169 +18,37 @@ * The nested object which contains the information and credential which can be used to connect with related store or * compute resource. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LinkedService.class, visible = true) -@JsonTypeName("LinkedService") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AzureStorage", value = AzureStorageLinkedService.class), - @JsonSubTypes.Type(name = "AzureBlobStorage", value = AzureBlobStorageLinkedService.class), - @JsonSubTypes.Type(name = "AzureTableStorage", value = AzureTableStorageLinkedService.class), - @JsonSubTypes.Type(name = "AzureSqlDW", value = AzureSqlDWLinkedService.class), - @JsonSubTypes.Type(name = "SqlServer", value = SqlServerLinkedService.class), - @JsonSubTypes.Type(name = "AmazonRdsForSqlServer", value = AmazonRdsForSqlServerLinkedService.class), - @JsonSubTypes.Type(name = "AzureSqlDatabase", value = AzureSqlDatabaseLinkedService.class), - @JsonSubTypes.Type(name = "AzureSqlMI", value = AzureSqlMILinkedService.class), - @JsonSubTypes.Type(name = "AzureBatch", value = AzureBatchLinkedService.class), - @JsonSubTypes.Type(name = "AzureKeyVault", value = AzureKeyVaultLinkedService.class), - @JsonSubTypes.Type(name = "CosmosDb", value = CosmosDbLinkedService.class), - @JsonSubTypes.Type(name = "Dynamics", value = DynamicsLinkedService.class), - @JsonSubTypes.Type(name = "DynamicsCrm", value = DynamicsCrmLinkedService.class), - @JsonSubTypes.Type(name = "CommonDataServiceForApps", value = CommonDataServiceForAppsLinkedService.class), - @JsonSubTypes.Type(name = "HDInsight", value = HDInsightLinkedService.class), - @JsonSubTypes.Type(name = "FileServer", value = FileServerLinkedService.class), - @JsonSubTypes.Type(name = "AzureFileStorage", value = AzureFileStorageLinkedService.class), - @JsonSubTypes.Type(name = "AmazonS3Compatible", value = AmazonS3CompatibleLinkedService.class), - @JsonSubTypes.Type(name = "OracleCloudStorage", value = OracleCloudStorageLinkedService.class), - @JsonSubTypes.Type(name = "GoogleCloudStorage", value = GoogleCloudStorageLinkedService.class), - @JsonSubTypes.Type(name = "Oracle", value = 
OracleLinkedService.class), - @JsonSubTypes.Type(name = "AmazonRdsForOracle", value = AmazonRdsForOracleLinkedService.class), - @JsonSubTypes.Type(name = "AzureMySql", value = AzureMySqlLinkedService.class), - @JsonSubTypes.Type(name = "MySql", value = MySqlLinkedService.class), - @JsonSubTypes.Type(name = "PostgreSql", value = PostgreSqlLinkedService.class), - @JsonSubTypes.Type(name = "PostgreSqlV2", value = PostgreSqlV2LinkedService.class), - @JsonSubTypes.Type(name = "Sybase", value = SybaseLinkedService.class), - @JsonSubTypes.Type(name = "Db2", value = Db2LinkedService.class), - @JsonSubTypes.Type(name = "Teradata", value = TeradataLinkedService.class), - @JsonSubTypes.Type(name = "AzureML", value = AzureMLLinkedService.class), - @JsonSubTypes.Type(name = "AzureMLService", value = AzureMLServiceLinkedService.class), - @JsonSubTypes.Type(name = "Odbc", value = OdbcLinkedService.class), - @JsonSubTypes.Type(name = "Informix", value = InformixLinkedService.class), - @JsonSubTypes.Type(name = "MicrosoftAccess", value = MicrosoftAccessLinkedService.class), - @JsonSubTypes.Type(name = "Hdfs", value = HdfsLinkedService.class), - @JsonSubTypes.Type(name = "OData", value = ODataLinkedService.class), - @JsonSubTypes.Type(name = "Web", value = WebLinkedService.class), - @JsonSubTypes.Type(name = "Cassandra", value = CassandraLinkedService.class), - @JsonSubTypes.Type(name = "MongoDb", value = MongoDbLinkedService.class), - @JsonSubTypes.Type(name = "MongoDbAtlas", value = MongoDbAtlasLinkedService.class), - @JsonSubTypes.Type(name = "MongoDbV2", value = MongoDbV2LinkedService.class), - @JsonSubTypes.Type(name = "CosmosDbMongoDbApi", value = CosmosDbMongoDbApiLinkedService.class), - @JsonSubTypes.Type(name = "AzureDataLakeStore", value = AzureDataLakeStoreLinkedService.class), - @JsonSubTypes.Type(name = "AzureBlobFS", value = AzureBlobFSLinkedService.class), - @JsonSubTypes.Type(name = "Office365", value = Office365LinkedService.class), - @JsonSubTypes.Type(name = 
"Salesforce", value = SalesforceLinkedService.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloud", value = SalesforceServiceCloudLinkedService.class), - @JsonSubTypes.Type(name = "SapCloudForCustomer", value = SapCloudForCustomerLinkedService.class), - @JsonSubTypes.Type(name = "SapEcc", value = SapEccLinkedService.class), - @JsonSubTypes.Type(name = "SapOpenHub", value = SapOpenHubLinkedService.class), - @JsonSubTypes.Type(name = "SapOdp", value = SapOdpLinkedService.class), - @JsonSubTypes.Type(name = "RestService", value = RestServiceLinkedService.class), - @JsonSubTypes.Type(name = "TeamDesk", value = TeamDeskLinkedService.class), - @JsonSubTypes.Type(name = "Quickbase", value = QuickbaseLinkedService.class), - @JsonSubTypes.Type(name = "Smartsheet", value = SmartsheetLinkedService.class), - @JsonSubTypes.Type(name = "Zendesk", value = ZendeskLinkedService.class), - @JsonSubTypes.Type(name = "Dataworld", value = DataworldLinkedService.class), - @JsonSubTypes.Type(name = "AppFigures", value = AppFiguresLinkedService.class), - @JsonSubTypes.Type(name = "Asana", value = AsanaLinkedService.class), - @JsonSubTypes.Type(name = "Twilio", value = TwilioLinkedService.class), - @JsonSubTypes.Type(name = "GoogleSheets", value = GoogleSheetsLinkedService.class), - @JsonSubTypes.Type(name = "AmazonS3", value = AmazonS3LinkedService.class), - @JsonSubTypes.Type(name = "AmazonRedshift", value = AmazonRedshiftLinkedService.class), - @JsonSubTypes.Type(name = "CustomDataSource", value = CustomDataSourceLinkedService.class), - @JsonSubTypes.Type(name = "AzureSearch", value = AzureSearchLinkedService.class), - @JsonSubTypes.Type(name = "HttpServer", value = HttpLinkedService.class), - @JsonSubTypes.Type(name = "FtpServer", value = FtpServerLinkedService.class), - @JsonSubTypes.Type(name = "Sftp", value = SftpServerLinkedService.class), - @JsonSubTypes.Type(name = "SapBW", value = SapBWLinkedService.class), - @JsonSubTypes.Type(name = "SapHana", value = 
SapHanaLinkedService.class), - @JsonSubTypes.Type(name = "AmazonMWS", value = AmazonMwsLinkedService.class), - @JsonSubTypes.Type(name = "AzurePostgreSql", value = AzurePostgreSqlLinkedService.class), - @JsonSubTypes.Type(name = "Concur", value = ConcurLinkedService.class), - @JsonSubTypes.Type(name = "Couchbase", value = CouchbaseLinkedService.class), - @JsonSubTypes.Type(name = "Drill", value = DrillLinkedService.class), - @JsonSubTypes.Type(name = "Eloqua", value = EloquaLinkedService.class), - @JsonSubTypes.Type(name = "GoogleBigQuery", value = GoogleBigQueryLinkedService.class), - @JsonSubTypes.Type(name = "GoogleBigQueryV2", value = GoogleBigQueryV2LinkedService.class), - @JsonSubTypes.Type(name = "Greenplum", value = GreenplumLinkedService.class), - @JsonSubTypes.Type(name = "HBase", value = HBaseLinkedService.class), - @JsonSubTypes.Type(name = "Hive", value = HiveLinkedService.class), - @JsonSubTypes.Type(name = "Hubspot", value = HubspotLinkedService.class), - @JsonSubTypes.Type(name = "Impala", value = ImpalaLinkedService.class), - @JsonSubTypes.Type(name = "Jira", value = JiraLinkedService.class), - @JsonSubTypes.Type(name = "Magento", value = MagentoLinkedService.class), - @JsonSubTypes.Type(name = "MariaDB", value = MariaDBLinkedService.class), - @JsonSubTypes.Type(name = "AzureMariaDB", value = AzureMariaDBLinkedService.class), - @JsonSubTypes.Type(name = "Marketo", value = MarketoLinkedService.class), - @JsonSubTypes.Type(name = "Paypal", value = PaypalLinkedService.class), - @JsonSubTypes.Type(name = "Phoenix", value = PhoenixLinkedService.class), - @JsonSubTypes.Type(name = "Presto", value = PrestoLinkedService.class), - @JsonSubTypes.Type(name = "QuickBooks", value = QuickBooksLinkedService.class), - @JsonSubTypes.Type(name = "ServiceNow", value = ServiceNowLinkedService.class), - @JsonSubTypes.Type(name = "Shopify", value = ShopifyLinkedService.class), - @JsonSubTypes.Type(name = "Spark", value = SparkLinkedService.class), - 
@JsonSubTypes.Type(name = "Square", value = SquareLinkedService.class), - @JsonSubTypes.Type(name = "Xero", value = XeroLinkedService.class), - @JsonSubTypes.Type(name = "Zoho", value = ZohoLinkedService.class), - @JsonSubTypes.Type(name = "Vertica", value = VerticaLinkedService.class), - @JsonSubTypes.Type(name = "Netezza", value = NetezzaLinkedService.class), - @JsonSubTypes.Type(name = "SalesforceMarketingCloud", value = SalesforceMarketingCloudLinkedService.class), - @JsonSubTypes.Type(name = "HDInsightOnDemand", value = HDInsightOnDemandLinkedService.class), - @JsonSubTypes.Type(name = "AzureDataLakeAnalytics", value = AzureDataLakeAnalyticsLinkedService.class), - @JsonSubTypes.Type(name = "AzureDatabricks", value = AzureDatabricksLinkedService.class), - @JsonSubTypes.Type(name = "AzureDatabricksDeltaLake", value = AzureDatabricksDeltaLakeLinkedService.class), - @JsonSubTypes.Type(name = "Responsys", value = ResponsysLinkedService.class), - @JsonSubTypes.Type(name = "DynamicsAX", value = DynamicsAXLinkedService.class), - @JsonSubTypes.Type(name = "OracleServiceCloud", value = OracleServiceCloudLinkedService.class), - @JsonSubTypes.Type(name = "GoogleAdWords", value = GoogleAdWordsLinkedService.class), - @JsonSubTypes.Type(name = "SapTable", value = SapTableLinkedService.class), - @JsonSubTypes.Type(name = "AzureDataExplorer", value = AzureDataExplorerLinkedService.class), - @JsonSubTypes.Type(name = "AzureFunction", value = AzureFunctionLinkedService.class), - @JsonSubTypes.Type(name = "Snowflake", value = SnowflakeLinkedService.class), - @JsonSubTypes.Type(name = "SnowflakeV2", value = SnowflakeV2LinkedService.class), - @JsonSubTypes.Type(name = "SharePointOnlineList", value = SharePointOnlineListLinkedService.class), - @JsonSubTypes.Type(name = "AzureSynapseArtifacts", value = AzureSynapseArtifactsLinkedService.class), - @JsonSubTypes.Type(name = "LakeHouse", value = LakeHouseLinkedService.class), - @JsonSubTypes.Type(name = "SalesforceV2", value = 
SalesforceV2LinkedService.class), - @JsonSubTypes.Type(name = "SalesforceServiceCloudV2", value = SalesforceServiceCloudV2LinkedService.class), - @JsonSubTypes.Type(name = "Warehouse", value = WarehouseLinkedService.class), - @JsonSubTypes.Type(name = "ServiceNowV2", value = ServiceNowV2LinkedService.class) }) @Fluent -public class LinkedService { +public class LinkedService implements JsonSerializable { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "LinkedService"; /* * The integration runtime reference. */ - @JsonProperty(value = "connectVia") private IntegrationRuntimeReference connectVia; /* * Linked service description. */ - @JsonProperty(value = "description") private String description; /* * Parameters for linked service. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /* * List of tags that can be used for describing the linked service. */ - @JsonProperty(value = "annotations") private List annotations; /* * The nested object which contains the information and credential which can be used to connect with related store * or compute resource. */ - @JsonIgnore private Map additionalProperties; /** @@ -288,7 +152,6 @@ public LinkedService withAnnotations(List annotations) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -305,14 +168,6 @@ public LinkedService withAdditionalProperties(Map additionalProp return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -330,4 +185,331 @@ public void validate() { }); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("connectVia", this.connectVia); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the LinkedService. + */ + public static LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("AzureStorage".equals(discriminatorValue)) { + return AzureStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureBlobStorage".equals(discriminatorValue)) { + return AzureBlobStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureTableStorage".equals(discriminatorValue)) { + return AzureTableStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureSqlDW".equals(discriminatorValue)) { + return AzureSqlDWLinkedService.fromJson(readerToUse.reset()); + } else if ("SqlServer".equals(discriminatorValue)) { + return SqlServerLinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForSqlServer".equals(discriminatorValue)) { + return AmazonRdsForSqlServerLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureSqlDatabase".equals(discriminatorValue)) { + return AzureSqlDatabaseLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureSqlMI".equals(discriminatorValue)) { + return AzureSqlMILinkedService.fromJson(readerToUse.reset()); + } else if ("AzureBatch".equals(discriminatorValue)) { + return AzureBatchLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureKeyVault".equals(discriminatorValue)) { + return AzureKeyVaultLinkedService.fromJson(readerToUse.reset()); + } else if ("CosmosDb".equals(discriminatorValue)) { + return CosmosDbLinkedService.fromJson(readerToUse.reset()); + } else if ("Dynamics".equals(discriminatorValue)) { + return DynamicsLinkedService.fromJson(readerToUse.reset()); + } else if ("DynamicsCrm".equals(discriminatorValue)) { + return DynamicsCrmLinkedService.fromJson(readerToUse.reset()); + } else if ("CommonDataServiceForApps".equals(discriminatorValue)) { + return CommonDataServiceForAppsLinkedService.fromJson(readerToUse.reset()); + } else if ("HDInsight".equals(discriminatorValue)) { + return HDInsightLinkedService.fromJson(readerToUse.reset()); + } else if ("FileServer".equals(discriminatorValue)) { + return 
FileServerLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureFileStorage".equals(discriminatorValue)) { + return AzureFileStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonS3Compatible".equals(discriminatorValue)) { + return AmazonS3CompatibleLinkedService.fromJson(readerToUse.reset()); + } else if ("OracleCloudStorage".equals(discriminatorValue)) { + return OracleCloudStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("GoogleCloudStorage".equals(discriminatorValue)) { + return GoogleCloudStorageLinkedService.fromJson(readerToUse.reset()); + } else if ("Oracle".equals(discriminatorValue)) { + return OracleLinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForOracle".equals(discriminatorValue)) { + return AmazonRdsForOracleLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureMySql".equals(discriminatorValue)) { + return AzureMySqlLinkedService.fromJson(readerToUse.reset()); + } else if ("MySql".equals(discriminatorValue)) { + return MySqlLinkedService.fromJson(readerToUse.reset()); + } else if ("PostgreSql".equals(discriminatorValue)) { + return PostgreSqlLinkedService.fromJson(readerToUse.reset()); + } else if ("PostgreSqlV2".equals(discriminatorValue)) { + return PostgreSqlV2LinkedService.fromJson(readerToUse.reset()); + } else if ("Sybase".equals(discriminatorValue)) { + return SybaseLinkedService.fromJson(readerToUse.reset()); + } else if ("Db2".equals(discriminatorValue)) { + return Db2LinkedService.fromJson(readerToUse.reset()); + } else if ("Teradata".equals(discriminatorValue)) { + return TeradataLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureML".equals(discriminatorValue)) { + return AzureMLLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureMLService".equals(discriminatorValue)) { + return AzureMLServiceLinkedService.fromJson(readerToUse.reset()); + } else if ("Odbc".equals(discriminatorValue)) { + return 
OdbcLinkedService.fromJson(readerToUse.reset()); + } else if ("Informix".equals(discriminatorValue)) { + return InformixLinkedService.fromJson(readerToUse.reset()); + } else if ("MicrosoftAccess".equals(discriminatorValue)) { + return MicrosoftAccessLinkedService.fromJson(readerToUse.reset()); + } else if ("Hdfs".equals(discriminatorValue)) { + return HdfsLinkedService.fromJson(readerToUse.reset()); + } else if ("OData".equals(discriminatorValue)) { + return ODataLinkedService.fromJson(readerToUse.reset()); + } else if ("Web".equals(discriminatorValue)) { + return WebLinkedService.fromJson(readerToUse.reset()); + } else if ("Cassandra".equals(discriminatorValue)) { + return CassandraLinkedService.fromJson(readerToUse.reset()); + } else if ("MongoDb".equals(discriminatorValue)) { + return MongoDbLinkedService.fromJson(readerToUse.reset()); + } else if ("MongoDbAtlas".equals(discriminatorValue)) { + return MongoDbAtlasLinkedService.fromJson(readerToUse.reset()); + } else if ("MongoDbV2".equals(discriminatorValue)) { + return MongoDbV2LinkedService.fromJson(readerToUse.reset()); + } else if ("CosmosDbMongoDbApi".equals(discriminatorValue)) { + return CosmosDbMongoDbApiLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStore".equals(discriminatorValue)) { + return AzureDataLakeStoreLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureBlobFS".equals(discriminatorValue)) { + return AzureBlobFSLinkedService.fromJson(readerToUse.reset()); + } else if ("Office365".equals(discriminatorValue)) { + return Office365LinkedService.fromJson(readerToUse.reset()); + } else if ("Salesforce".equals(discriminatorValue)) { + return SalesforceLinkedService.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloud".equals(discriminatorValue)) { + return SalesforceServiceCloudLinkedService.fromJson(readerToUse.reset()); + } else if ("SapCloudForCustomer".equals(discriminatorValue)) { + return 
SapCloudForCustomerLinkedService.fromJson(readerToUse.reset()); + } else if ("SapEcc".equals(discriminatorValue)) { + return SapEccLinkedService.fromJson(readerToUse.reset()); + } else if ("SapOpenHub".equals(discriminatorValue)) { + return SapOpenHubLinkedService.fromJson(readerToUse.reset()); + } else if ("SapOdp".equals(discriminatorValue)) { + return SapOdpLinkedService.fromJson(readerToUse.reset()); + } else if ("RestService".equals(discriminatorValue)) { + return RestServiceLinkedService.fromJson(readerToUse.reset()); + } else if ("TeamDesk".equals(discriminatorValue)) { + return TeamDeskLinkedService.fromJson(readerToUse.reset()); + } else if ("Quickbase".equals(discriminatorValue)) { + return QuickbaseLinkedService.fromJson(readerToUse.reset()); + } else if ("Smartsheet".equals(discriminatorValue)) { + return SmartsheetLinkedService.fromJson(readerToUse.reset()); + } else if ("Zendesk".equals(discriminatorValue)) { + return ZendeskLinkedService.fromJson(readerToUse.reset()); + } else if ("Dataworld".equals(discriminatorValue)) { + return DataworldLinkedService.fromJson(readerToUse.reset()); + } else if ("AppFigures".equals(discriminatorValue)) { + return AppFiguresLinkedService.fromJson(readerToUse.reset()); + } else if ("Asana".equals(discriminatorValue)) { + return AsanaLinkedService.fromJson(readerToUse.reset()); + } else if ("Twilio".equals(discriminatorValue)) { + return TwilioLinkedService.fromJson(readerToUse.reset()); + } else if ("GoogleSheets".equals(discriminatorValue)) { + return GoogleSheetsLinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonS3".equals(discriminatorValue)) { + return AmazonS3LinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonRedshift".equals(discriminatorValue)) { + return AmazonRedshiftLinkedService.fromJson(readerToUse.reset()); + } else if ("CustomDataSource".equals(discriminatorValue)) { + return CustomDataSourceLinkedService.fromJson(readerToUse.reset()); + } else if 
("AzureSearch".equals(discriminatorValue)) { + return AzureSearchLinkedService.fromJson(readerToUse.reset()); + } else if ("HttpServer".equals(discriminatorValue)) { + return HttpLinkedService.fromJson(readerToUse.reset()); + } else if ("FtpServer".equals(discriminatorValue)) { + return FtpServerLinkedService.fromJson(readerToUse.reset()); + } else if ("Sftp".equals(discriminatorValue)) { + return SftpServerLinkedService.fromJson(readerToUse.reset()); + } else if ("SapBW".equals(discriminatorValue)) { + return SapBWLinkedService.fromJson(readerToUse.reset()); + } else if ("SapHana".equals(discriminatorValue)) { + return SapHanaLinkedService.fromJson(readerToUse.reset()); + } else if ("AmazonMWS".equals(discriminatorValue)) { + return AmazonMwsLinkedService.fromJson(readerToUse.reset()); + } else if ("AzurePostgreSql".equals(discriminatorValue)) { + return AzurePostgreSqlLinkedService.fromJson(readerToUse.reset()); + } else if ("Concur".equals(discriminatorValue)) { + return ConcurLinkedService.fromJson(readerToUse.reset()); + } else if ("Couchbase".equals(discriminatorValue)) { + return CouchbaseLinkedService.fromJson(readerToUse.reset()); + } else if ("Drill".equals(discriminatorValue)) { + return DrillLinkedService.fromJson(readerToUse.reset()); + } else if ("Eloqua".equals(discriminatorValue)) { + return EloquaLinkedService.fromJson(readerToUse.reset()); + } else if ("GoogleBigQuery".equals(discriminatorValue)) { + return GoogleBigQueryLinkedService.fromJson(readerToUse.reset()); + } else if ("GoogleBigQueryV2".equals(discriminatorValue)) { + return GoogleBigQueryV2LinkedService.fromJson(readerToUse.reset()); + } else if ("Greenplum".equals(discriminatorValue)) { + return GreenplumLinkedService.fromJson(readerToUse.reset()); + } else if ("HBase".equals(discriminatorValue)) { + return HBaseLinkedService.fromJson(readerToUse.reset()); + } else if ("Hive".equals(discriminatorValue)) { + return HiveLinkedService.fromJson(readerToUse.reset()); + } else if 
("Hubspot".equals(discriminatorValue)) { + return HubspotLinkedService.fromJson(readerToUse.reset()); + } else if ("Impala".equals(discriminatorValue)) { + return ImpalaLinkedService.fromJson(readerToUse.reset()); + } else if ("Jira".equals(discriminatorValue)) { + return JiraLinkedService.fromJson(readerToUse.reset()); + } else if ("Magento".equals(discriminatorValue)) { + return MagentoLinkedService.fromJson(readerToUse.reset()); + } else if ("MariaDB".equals(discriminatorValue)) { + return MariaDBLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureMariaDB".equals(discriminatorValue)) { + return AzureMariaDBLinkedService.fromJson(readerToUse.reset()); + } else if ("Marketo".equals(discriminatorValue)) { + return MarketoLinkedService.fromJson(readerToUse.reset()); + } else if ("Paypal".equals(discriminatorValue)) { + return PaypalLinkedService.fromJson(readerToUse.reset()); + } else if ("Phoenix".equals(discriminatorValue)) { + return PhoenixLinkedService.fromJson(readerToUse.reset()); + } else if ("Presto".equals(discriminatorValue)) { + return PrestoLinkedService.fromJson(readerToUse.reset()); + } else if ("QuickBooks".equals(discriminatorValue)) { + return QuickBooksLinkedService.fromJson(readerToUse.reset()); + } else if ("ServiceNow".equals(discriminatorValue)) { + return ServiceNowLinkedService.fromJson(readerToUse.reset()); + } else if ("Shopify".equals(discriminatorValue)) { + return ShopifyLinkedService.fromJson(readerToUse.reset()); + } else if ("Spark".equals(discriminatorValue)) { + return SparkLinkedService.fromJson(readerToUse.reset()); + } else if ("Square".equals(discriminatorValue)) { + return SquareLinkedService.fromJson(readerToUse.reset()); + } else if ("Xero".equals(discriminatorValue)) { + return XeroLinkedService.fromJson(readerToUse.reset()); + } else if ("Zoho".equals(discriminatorValue)) { + return ZohoLinkedService.fromJson(readerToUse.reset()); + } else if ("Vertica".equals(discriminatorValue)) { + return 
VerticaLinkedService.fromJson(readerToUse.reset()); + } else if ("Netezza".equals(discriminatorValue)) { + return NetezzaLinkedService.fromJson(readerToUse.reset()); + } else if ("SalesforceMarketingCloud".equals(discriminatorValue)) { + return SalesforceMarketingCloudLinkedService.fromJson(readerToUse.reset()); + } else if ("HDInsightOnDemand".equals(discriminatorValue)) { + return HDInsightOnDemandLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeAnalytics".equals(discriminatorValue)) { + return AzureDataLakeAnalyticsLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureDatabricks".equals(discriminatorValue)) { + return AzureDatabricksLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureDatabricksDeltaLake".equals(discriminatorValue)) { + return AzureDatabricksDeltaLakeLinkedService.fromJson(readerToUse.reset()); + } else if ("Responsys".equals(discriminatorValue)) { + return ResponsysLinkedService.fromJson(readerToUse.reset()); + } else if ("DynamicsAX".equals(discriminatorValue)) { + return DynamicsAXLinkedService.fromJson(readerToUse.reset()); + } else if ("OracleServiceCloud".equals(discriminatorValue)) { + return OracleServiceCloudLinkedService.fromJson(readerToUse.reset()); + } else if ("GoogleAdWords".equals(discriminatorValue)) { + return GoogleAdWordsLinkedService.fromJson(readerToUse.reset()); + } else if ("SapTable".equals(discriminatorValue)) { + return SapTableLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureDataExplorer".equals(discriminatorValue)) { + return AzureDataExplorerLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureFunction".equals(discriminatorValue)) { + return AzureFunctionLinkedService.fromJson(readerToUse.reset()); + } else if ("Snowflake".equals(discriminatorValue)) { + return SnowflakeLinkedService.fromJson(readerToUse.reset()); + } else if ("SnowflakeV2".equals(discriminatorValue)) { + return SnowflakeV2LinkedService.fromJson(readerToUse.reset()); + } else if 
("SharePointOnlineList".equals(discriminatorValue)) { + return SharePointOnlineListLinkedService.fromJson(readerToUse.reset()); + } else if ("AzureSynapseArtifacts".equals(discriminatorValue)) { + return AzureSynapseArtifactsLinkedService.fromJson(readerToUse.reset()); + } else if ("LakeHouse".equals(discriminatorValue)) { + return LakeHouseLinkedService.fromJson(readerToUse.reset()); + } else if ("SalesforceV2".equals(discriminatorValue)) { + return SalesforceV2LinkedService.fromJson(readerToUse.reset()); + } else if ("SalesforceServiceCloudV2".equals(discriminatorValue)) { + return SalesforceServiceCloudV2LinkedService.fromJson(readerToUse.reset()); + } else if ("Warehouse".equals(discriminatorValue)) { + return WarehouseLinkedService.fromJson(readerToUse.reset()); + } else if ("ServiceNowV2".equals(discriminatorValue)) { + return ServiceNowV2LinkedService.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static LinkedService fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedService deserializedLinkedService = new LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedLinkedService.type = reader.getString(); + } else if ("connectVia".equals(fieldName)) { + deserializedLinkedService.connectVia = IntegrationRuntimeReference.fromJson(reader); + } else if ("description".equals(fieldName)) { + deserializedLinkedService.description = reader.getString(); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedLinkedService.parameters = parameters; + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + 
deserializedLinkedService.annotations = annotations; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLinkedService.additionalProperties = additionalProperties; + + return deserializedLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java index 3cc8115c53a0..9a7249201850 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceDebugResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Linked service debug resource. @@ -16,7 +19,6 @@ public final class LinkedServiceDebugResource extends SubResourceDebugResource { /* * Properties of linked service. 
*/ - @JsonProperty(value = "properties", required = true) private LinkedService properties; /** @@ -72,4 +74,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedServiceDebugResource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedServiceDebugResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedServiceDebugResource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedServiceDebugResource. 
+ */ + public static LinkedServiceDebugResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedServiceDebugResource deserializedLinkedServiceDebugResource = new LinkedServiceDebugResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedLinkedServiceDebugResource.withName(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedLinkedServiceDebugResource.properties = LinkedService.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedServiceDebugResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java index fa2bdf17844c..97e0cdd34c94 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.LinkedServiceResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of linked service resources. */ @Fluent -public final class LinkedServiceListResponse { +public final class LinkedServiceListResponse implements JsonSerializable { /* * List of linked services. 
*/ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedServiceListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedServiceListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedServiceListResponse if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedServiceListResponse. 
+ */ + public static LinkedServiceListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedServiceListResponse deserializedLinkedServiceListResponse = new LinkedServiceListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> LinkedServiceResourceInner.fromJson(reader1)); + deserializedLinkedServiceListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedLinkedServiceListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedServiceListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java index 7c69e3f1842a..7c74e311c43d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedServiceReference.java @@ -6,32 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; /** * Linked service reference type. 
*/ @Fluent -public final class LinkedServiceReference { +public final class LinkedServiceReference implements JsonSerializable { /* * Linked service reference type. */ - @JsonProperty(value = "type", required = true) private String type = "LinkedServiceReference"; /* * Reference LinkedService name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Arguments for LinkedService. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /** @@ -114,4 +113,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LinkedServiceReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("referenceName", this.referenceName); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LinkedServiceReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LinkedServiceReference if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LinkedServiceReference. 
+ */ + public static LinkedServiceReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LinkedServiceReference deserializedLinkedServiceReference = new LinkedServiceReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceName".equals(fieldName)) { + deserializedLinkedServiceReference.referenceName = reader.getString(); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedLinkedServiceReference.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedLinkedServiceReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java index ca9c5c3d2b63..b74a9c55dcd3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogLocationSettings.java @@ -6,24 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Log location settings. */ @Fluent -public final class LogLocationSettings { +public final class LogLocationSettings implements JsonSerializable { /* * Log storage linked service reference. 
*/ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /* * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "path") private Object path; /** @@ -90,4 +92,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LogLocationSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeUntypedField("path", this.path); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LogLocationSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LogLocationSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LogLocationSettings. 
+ */ + public static LogLocationSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LogLocationSettings deserializedLogLocationSettings = new LogLocationSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedLogLocationSettings.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("path".equals(fieldName)) { + deserializedLogLocationSettings.path = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedLogLocationSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java index 2cb732c4fb0b..85e25acaf312 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogSettings.java @@ -6,29 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Log settings. */ @Fluent -public final class LogSettings { +public final class LogSettings implements JsonSerializable<LogSettings> { /* * Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableCopyActivityLog") private Object enableCopyActivityLog; /* * Specifies settings for copy activity log. 
*/ - @JsonProperty(value = "copyActivityLogSettings") private CopyActivityLogSettings copyActivityLogSettings; /* * Log location settings customer needs to provide when enabling log. */ - @JsonProperty(value = "logLocationSettings", required = true) private LogLocationSettings logLocationSettings; /** @@ -118,4 +119,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LogSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("logLocationSettings", this.logLocationSettings); + jsonWriter.writeUntypedField("enableCopyActivityLog", this.enableCopyActivityLog); + jsonWriter.writeJsonField("copyActivityLogSettings", this.copyActivityLogSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LogSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LogSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LogSettings. 
+ */ + public static LogSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LogSettings deserializedLogSettings = new LogSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("logLocationSettings".equals(fieldName)) { + deserializedLogSettings.logLocationSettings = LogLocationSettings.fromJson(reader); + } else if ("enableCopyActivityLog".equals(fieldName)) { + deserializedLogSettings.enableCopyActivityLog = reader.readUntyped(); + } else if ("copyActivityLogSettings".equals(fieldName)) { + deserializedLogSettings.copyActivityLogSettings = CopyActivityLogSettings.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedLogSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java index 52e85871b2c8..933a52d4f1e3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LogStorageSettings.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -17,36 +18,31 @@ * 
(Deprecated. Please use LogSettings) Log storage settings. */ @Fluent -public final class LogStorageSettings { +public final class LogStorageSettings implements JsonSerializable<LogStorageSettings> { /* * Log storage linked service reference. */ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /* * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "path") private Object path; /* * Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logLevel") private Object logLevel; /* * Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableReliableLogging") private Object enableReliableLogging; /* * (Deprecated. Please use LogSettings) Log storage settings. */ - @JsonIgnore private Map<String, Object> additionalProperties; /** @@ -146,7 +142,6 @@ public LogStorageSettings withEnableReliableLogging(Object enableReliableLogging * * @return the additionalProperties value. */ - @JsonAnyGetter public Map<String, Object> additionalProperties() { return this.additionalProperties; } @@ -162,14 +157,6 @@ public LogStorageSettings withAdditionalProperties(Map<String, Object> additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -186,4 +173,61 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LogStorageSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeUntypedField("path", this.path); + jsonWriter.writeUntypedField("logLevel", this.logLevel); + jsonWriter.writeUntypedField("enableReliableLogging", this.enableReliableLogging); + if (additionalProperties != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LogStorageSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LogStorageSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LogStorageSettings. 
+ */ + public static LogStorageSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LogStorageSettings deserializedLogStorageSettings = new LogStorageSettings(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedLogStorageSettings.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("path".equals(fieldName)) { + deserializedLogStorageSettings.path = reader.readUntyped(); + } else if ("logLevel".equals(fieldName)) { + deserializedLogStorageSettings.logLevel = reader.readUntyped(); + } else if ("enableReliableLogging".equals(fieldName)) { + deserializedLogStorageSettings.enableReliableLogging = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedLogStorageSettings.additionalProperties = additionalProperties; + + return deserializedLogStorageSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java index 62217c0bb610..f1695e279f69 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LookupActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.LookupActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Lookup activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LookupActivity.class, visible = true) -@JsonTypeName("Lookup") @Fluent public final class LookupActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Lookup"; /* * Lookup activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private LookupActivityTypeProperties innerTypeProperties = new LookupActivityTypeProperties(); /** @@ -219,4 +216,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(LookupActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of LookupActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of LookupActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the LookupActivity. 
+ */ + public static LookupActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + LookupActivity deserializedLookupActivity = new LookupActivity(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedLookupActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedLookupActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedLookupActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedLookupActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List<ActivityDependency> dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedLookupActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List<UserProperty> userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedLookupActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedLookupActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedLookupActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedLookupActivity.innerTypeProperties = LookupActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedLookupActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedLookupActivity.withAdditionalProperties(additionalProperties); + + return deserializedLookupActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java index 285254617c3e..a843ae4e3644 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MagentoLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Magento server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoLinkedService.class, visible = true) -@JsonTypeName("Magento") @Fluent public final class MagentoLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Magento"; /* * Magento server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MagentoLinkedServiceTypeProperties innerTypeProperties = new MagentoLinkedServiceTypeProperties(); /** @@ -259,4 +255,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MagentoLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MagentoLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MagentoLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MagentoLinkedService. 
+ */ + public static MagentoLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MagentoLinkedService deserializedMagentoLinkedService = new MagentoLinkedService(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMagentoLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMagentoLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMagentoLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMagentoLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMagentoLinkedService.innerTypeProperties + = MagentoLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMagentoLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMagentoLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMagentoLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java index 82dbf391f148..06753e563b5d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Magento server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoObjectDataset.class, visible = true) -@JsonTypeName("MagentoObject") @Fluent public final class MagentoObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MagentoObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MagentoObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MagentoObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MagentoObjectDataset. 
+ */ + public static MagentoObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MagentoObjectDataset deserializedMagentoObjectDataset = new MagentoObjectDataset(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMagentoObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMagentoObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMagentoObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMagentoObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMagentoObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMagentoObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMagentoObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMagentoObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMagentoObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMagentoObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedMagentoObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java index 402b3e141231..44f7a5a12401 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MagentoSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Magento server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MagentoSource.class, visible = true) -@JsonTypeName("MagentoSource") @Fluent public final class MagentoSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MagentoSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public MagentoSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MagentoSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MagentoSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MagentoSource. 
+ */ + public static MagentoSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MagentoSource deserializedMagentoSource = new MagentoSource(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMagentoSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMagentoSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMagentoSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMagentoSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMagentoSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMagentoSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMagentoSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMagentoSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMagentoSource.withAdditionalProperties(additionalProperties); + + return deserializedMagentoSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java index d6959766dc3b..0c7f17959e46 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIdentityCredential.java @@ -5,35 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Managed identity credential. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ManagedIdentityCredential.class, - visible = true) -@JsonTypeName("ManagedIdentity") @Fluent public final class ManagedIdentityCredential extends Credential { /* * Type of credential. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ManagedIdentity"; /* * Managed identity credential properties. 
*/ - @JsonProperty(value = "typeProperties") private ManagedIdentityTypeProperties innerTypeProperties; /** @@ -114,4 +107,62 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIdentityCredential from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIdentityCredential if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIdentityCredential. 
+ */ + public static ManagedIdentityCredential fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIdentityCredential deserializedManagedIdentityCredential = new ManagedIdentityCredential(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedManagedIdentityCredential.withDescription(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedManagedIdentityCredential.withAnnotations(annotations); + } else if ("type".equals(fieldName)) { + deserializedManagedIdentityCredential.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedManagedIdentityCredential.innerTypeProperties + = ManagedIdentityTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIdentityCredential.withAdditionalProperties(additionalProperties); + + return deserializedManagedIdentityCredential; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java index 44635a286948..b3e5ee15711b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntime.java @@ -6,46 +6,37 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Managed integration runtime, including managed elastic and managed dedicated integration runtimes. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ManagedIntegrationRuntime.class, - visible = true) -@JsonTypeName("Managed") @Fluent public final class ManagedIntegrationRuntime extends IntegrationRuntime { /* * Type of integration runtime. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.MANAGED; /* * Integration runtime state, only valid for managed dedicated integration runtime. */ - @JsonProperty(value = "state", access = JsonProperty.Access.WRITE_ONLY) private IntegrationRuntimeState state; /* * Managed integration runtime properties. */ - @JsonProperty(value = "typeProperties", required = true) private ManagedIntegrationRuntimeTypeProperties innerTypeProperties = new ManagedIntegrationRuntimeTypeProperties(); /* * Managed Virtual Network reference. 
*/ - @JsonProperty(value = "managedVirtualNetwork") private ManagedVirtualNetworkReference managedVirtualNetwork; /** @@ -204,4 +195,66 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedIntegrationRuntime.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeJsonField("managedVirtualNetwork", this.managedVirtualNetwork); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntime from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntime if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntime. 
+ */ + public static ManagedIntegrationRuntime fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntime deserializedManagedIntegrationRuntime = new ManagedIntegrationRuntime(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedManagedIntegrationRuntime.withDescription(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedManagedIntegrationRuntime.innerTypeProperties + = ManagedIntegrationRuntimeTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedManagedIntegrationRuntime.type = IntegrationRuntimeType.fromString(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedManagedIntegrationRuntime.state + = IntegrationRuntimeState.fromString(reader.getString()); + } else if ("managedVirtualNetwork".equals(fieldName)) { + deserializedManagedIntegrationRuntime.managedVirtualNetwork + = ManagedVirtualNetworkReference.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIntegrationRuntime.withAdditionalProperties(additionalProperties); + + return deserializedManagedIntegrationRuntime; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeError.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeError.java index 9e6edaf7e249..68342a9c1526 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeError.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeError.java @@ -5,10 +5,12 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.List; @@ -18,35 +20,30 @@ * Error definition for managed integration runtime. */ @Fluent -public final class ManagedIntegrationRuntimeError { +public final class ManagedIntegrationRuntimeError implements JsonSerializable { /* * The time when the error occurred. */ - @JsonProperty(value = "time", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime time; /* * Error code. */ - @JsonProperty(value = "code", access = JsonProperty.Access.WRITE_ONLY) private String code; /* * Managed integration runtime error parameters. */ - @JsonProperty(value = "parameters", access = JsonProperty.Access.WRITE_ONLY) private List parameters; /* * Error message. */ - @JsonProperty(value = "message", access = JsonProperty.Access.WRITE_ONLY) private String message; /* * Error definition for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -96,7 +93,6 @@ public String message() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -112,14 +108,6 @@ public ManagedIntegrationRuntimeError withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -127,4 +115,59 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeError from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeError if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeError. + */ + public static ManagedIntegrationRuntimeError fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeError deserializedManagedIntegrationRuntimeError + = new ManagedIntegrationRuntimeError(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("time".equals(fieldName)) { + deserializedManagedIntegrationRuntimeError.time = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("code".equals(fieldName)) { + deserializedManagedIntegrationRuntimeError.code = reader.getString(); + } else if ("parameters".equals(fieldName)) { + List parameters = reader.readArray(reader1 -> reader1.getString()); + deserializedManagedIntegrationRuntimeError.parameters = parameters; + } else if ("message".equals(fieldName)) { + deserializedManagedIntegrationRuntimeError.message = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIntegrationRuntimeError.additionalProperties = additionalProperties; + + return deserializedManagedIntegrationRuntimeError; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNode.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNode.java index dafca127b2d2..72e0e3f124f2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNode.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNode.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,29 +18,25 @@ * Properties of integration runtime node. */ @Fluent -public final class ManagedIntegrationRuntimeNode { +public final class ManagedIntegrationRuntimeNode implements JsonSerializable { /* * The managed integration runtime node id. */ - @JsonProperty(value = "nodeId", access = JsonProperty.Access.WRITE_ONLY) private String nodeId; /* * The managed integration runtime node status. 
*/ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private ManagedIntegrationRuntimeNodeStatus status; /* * The errors that occurred on this integration runtime node. */ - @JsonProperty(value = "errors") private List errors; /* * Properties of integration runtime node. */ - @JsonIgnore private Map additionalProperties; /** @@ -91,7 +88,6 @@ public ManagedIntegrationRuntimeNode withErrors(List additionalProperties() { return this.additionalProperties; } @@ -107,14 +103,6 @@ public ManagedIntegrationRuntimeNode withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -125,4 +113,59 @@ public void validate() { errors().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("errors", this.errors, (writer, element) -> writer.writeJson(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeNode from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeNode if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeNode. 
+ */ + public static ManagedIntegrationRuntimeNode fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeNode deserializedManagedIntegrationRuntimeNode + = new ManagedIntegrationRuntimeNode(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("nodeId".equals(fieldName)) { + deserializedManagedIntegrationRuntimeNode.nodeId = reader.getString(); + } else if ("status".equals(fieldName)) { + deserializedManagedIntegrationRuntimeNode.status + = ManagedIntegrationRuntimeNodeStatus.fromString(reader.getString()); + } else if ("errors".equals(fieldName)) { + List errors + = reader.readArray(reader1 -> ManagedIntegrationRuntimeError.fromJson(reader1)); + deserializedManagedIntegrationRuntimeNode.errors = errors; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIntegrationRuntimeNode.additionalProperties = additionalProperties; + + return deserializedManagedIntegrationRuntimeNode; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNodeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNodeStatus.java index 6e38d8714209..bba5677be587 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNodeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeNodeStatus.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -48,7 +47,6 @@ public ManagedIntegrationRuntimeNodeStatus() { * @param name a name to look for. * @return the corresponding ManagedIntegrationRuntimeNodeStatus. */ - @JsonCreator public static ManagedIntegrationRuntimeNodeStatus fromString(String name) { return fromString(name, ManagedIntegrationRuntimeNodeStatus.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeOperationResult.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeOperationResult.java index 0e41cce6cdfb..cbfae4b94930 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeOperationResult.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeOperationResult.java @@ -5,10 +5,12 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.List; @@ -18,47 +20,41 @@ * Properties of managed integration runtime operation result. 
*/ @Fluent -public final class ManagedIntegrationRuntimeOperationResult { +public final class ManagedIntegrationRuntimeOperationResult + implements JsonSerializable { /* * The operation type. Could be start or stop. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * The start time of the operation. */ - @JsonProperty(value = "startTime", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime startTime; /* * The operation result. */ - @JsonProperty(value = "result", access = JsonProperty.Access.WRITE_ONLY) private String result; /* * The error code. */ - @JsonProperty(value = "errorCode", access = JsonProperty.Access.WRITE_ONLY) private String errorCode; /* * Managed integration runtime error parameters. */ - @JsonProperty(value = "parameters", access = JsonProperty.Access.WRITE_ONLY) private List parameters; /* * The activity id for the operation request. */ - @JsonProperty(value = "activityId", access = JsonProperty.Access.WRITE_ONLY) private String activityId; /* * Properties of managed integration runtime operation result. */ - @JsonIgnore private Map additionalProperties; /** @@ -126,7 +122,6 @@ public String activityId() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -142,14 +137,6 @@ public ManagedIntegrationRuntimeOperationResult withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -157,4 +144,63 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeOperationResult from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeOperationResult if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeOperationResult. + */ + public static ManagedIntegrationRuntimeOperationResult fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeOperationResult deserializedManagedIntegrationRuntimeOperationResult + = new ManagedIntegrationRuntimeOperationResult(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedManagedIntegrationRuntimeOperationResult.type = reader.getString(); + } else if ("startTime".equals(fieldName)) { + deserializedManagedIntegrationRuntimeOperationResult.startTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("result".equals(fieldName)) { + deserializedManagedIntegrationRuntimeOperationResult.result = reader.getString(); + } else if ("errorCode".equals(fieldName)) { + deserializedManagedIntegrationRuntimeOperationResult.errorCode = reader.getString(); + } else if 
("parameters".equals(fieldName)) { + List parameters = reader.readArray(reader1 -> reader1.getString()); + deserializedManagedIntegrationRuntimeOperationResult.parameters = parameters; + } else if ("activityId".equals(fieldName)) { + deserializedManagedIntegrationRuntimeOperationResult.activityId = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIntegrationRuntimeOperationResult.additionalProperties = additionalProperties; + + return deserializedManagedIntegrationRuntimeOperationResult; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java index 28e9ddb2e341..2369f563c738 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedIntegrationRuntimeStatus.java @@ -6,39 +6,42 @@ import com.azure.core.annotation.Immutable; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeStatusTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.time.OffsetDateTime; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; 
/** * Managed integration runtime status. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ManagedIntegrationRuntimeStatus.class, - visible = true) -@JsonTypeName("Managed") @Immutable public final class ManagedIntegrationRuntimeStatus extends IntegrationRuntimeStatus { /* * Type of integration runtime. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.MANAGED; /* * Managed integration runtime status type properties. */ - @JsonProperty(value = "typeProperties", required = true) private ManagedIntegrationRuntimeStatusTypeProperties innerTypeProperties = new ManagedIntegrationRuntimeStatusTypeProperties(); + /* + * The data factory name which the integration runtime belong to. + */ + private String dataFactoryName; + + /* + * The state of integration runtime. + */ + private IntegrationRuntimeState state; + /** * Creates an instance of ManagedIntegrationRuntimeStatus class. */ @@ -64,6 +67,26 @@ private ManagedIntegrationRuntimeStatusTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the dataFactoryName property: The data factory name which the integration runtime belong to. + * + * @return the dataFactoryName value. + */ + @Override + public String dataFactoryName() { + return this.dataFactoryName; + } + + /** + * Get the state property: The state of integration runtime. + * + * @return the state value. + */ + @Override + public IntegrationRuntimeState state() { + return this.state; + } + /** * Get the createTime property: The time at which the integration runtime was created, in ISO8601 format. 
* @@ -118,4 +141,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedIntegrationRuntimeStatus.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedIntegrationRuntimeStatus from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedIntegrationRuntimeStatus if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedIntegrationRuntimeStatus. 
+ */ + public static ManagedIntegrationRuntimeStatus fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedIntegrationRuntimeStatus deserializedManagedIntegrationRuntimeStatus + = new ManagedIntegrationRuntimeStatus(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataFactoryName".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatus.dataFactoryName = reader.getString(); + } else if ("state".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatus.state + = IntegrationRuntimeState.fromString(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatus.innerTypeProperties + = ManagedIntegrationRuntimeStatusTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedManagedIntegrationRuntimeStatus.type + = IntegrationRuntimeType.fromString(reader.getString()); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedIntegrationRuntimeStatus.withAdditionalProperties(additionalProperties); + + return deserializedManagedIntegrationRuntimeStatus; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java index 40703bc3211a..6216facc6ac9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpoint.java @@ 
-5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,47 +18,40 @@ * Properties of a managed private endpoint. */ @Fluent -public final class ManagedPrivateEndpoint { +public final class ManagedPrivateEndpoint implements JsonSerializable { /* * The managed private endpoint connection state */ - @JsonProperty(value = "connectionState") private ConnectionStateProperties connectionState; /* * Fully qualified domain names */ - @JsonProperty(value = "fqdns") private List fqdns; /* * The groupId to which the managed private endpoint is created */ - @JsonProperty(value = "groupId") private String groupId; /* * Denotes whether the managed private endpoint is reserved */ - @JsonProperty(value = "isReserved", access = JsonProperty.Access.WRITE_ONLY) private Boolean isReserved; /* * The ARM resource ID of the resource to which the managed private endpoint is created */ - @JsonProperty(value = "privateLinkResourceId") private String privateLinkResourceId; /* * The managed private endpoint provisioning state */ - @JsonProperty(value = "provisioningState", access = JsonProperty.Access.WRITE_ONLY) private String provisioningState; /* * Properties of a managed private endpoint */ - @JsonIgnore private Map additionalProperties; /** @@ -171,7 +165,6 @@ public String provisioningState() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -187,14 +180,6 @@ public ManagedPrivateEndpoint withAdditionalProperties(Map addit return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -205,4 +190,65 @@ public void validate() { connectionState().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectionState", this.connectionState); + jsonWriter.writeArrayField("fqdns", this.fqdns, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("groupId", this.groupId); + jsonWriter.writeStringField("privateLinkResourceId", this.privateLinkResourceId); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedPrivateEndpoint from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedPrivateEndpoint if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedPrivateEndpoint. 
+ */ + public static ManagedPrivateEndpoint fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedPrivateEndpoint deserializedManagedPrivateEndpoint = new ManagedPrivateEndpoint(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionState".equals(fieldName)) { + deserializedManagedPrivateEndpoint.connectionState = ConnectionStateProperties.fromJson(reader); + } else if ("fqdns".equals(fieldName)) { + List fqdns = reader.readArray(reader1 -> reader1.getString()); + deserializedManagedPrivateEndpoint.fqdns = fqdns; + } else if ("groupId".equals(fieldName)) { + deserializedManagedPrivateEndpoint.groupId = reader.getString(); + } else if ("isReserved".equals(fieldName)) { + deserializedManagedPrivateEndpoint.isReserved = reader.getNullable(JsonReader::getBoolean); + } else if ("privateLinkResourceId".equals(fieldName)) { + deserializedManagedPrivateEndpoint.privateLinkResourceId = reader.getString(); + } else if ("provisioningState".equals(fieldName)) { + deserializedManagedPrivateEndpoint.provisioningState = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedPrivateEndpoint.additionalProperties = additionalProperties; + + return deserializedManagedPrivateEndpoint; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java index 6a738e4f200d..f4858064c2f0 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedPrivateEndpointListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ManagedPrivateEndpointResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of managed private endpoint resources. */ @Fluent -public final class ManagedPrivateEndpointListResponse { +public final class ManagedPrivateEndpointListResponse implements JsonSerializable { /* * List of managed private endpoints. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedPrivateEndpointListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedPrivateEndpointListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ManagedPrivateEndpointListResponse if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedPrivateEndpointListResponse. + */ + public static ManagedPrivateEndpointListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedPrivateEndpointListResponse deserializedManagedPrivateEndpointListResponse + = new ManagedPrivateEndpointListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> ManagedPrivateEndpointResourceInner.fromJson(reader1)); + deserializedManagedPrivateEndpointListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedManagedPrivateEndpointListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedPrivateEndpointListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetwork.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetwork.java index 1f6f0901058e..a2cb521e6a5e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetwork.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetwork.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import 
com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,23 +17,20 @@ * A managed Virtual Network associated with the Azure Data Factory. */ @Fluent -public final class ManagedVirtualNetwork { +public final class ManagedVirtualNetwork implements JsonSerializable { /* * Managed Virtual Network ID. */ - @JsonProperty(value = "vNetId", access = JsonProperty.Access.WRITE_ONLY) private String vNetId; /* * Managed Virtual Network alias. */ - @JsonProperty(value = "alias", access = JsonProperty.Access.WRITE_ONLY) private String alias; /* * A managed Virtual Network associated with the Azure Data Factory */ - @JsonIgnore private Map additionalProperties; /** @@ -64,7 +62,6 @@ public String alias() { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -80,14 +77,6 @@ public ManagedVirtualNetwork withAdditionalProperties(Map additi return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -95,4 +84,52 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedVirtualNetwork from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedVirtualNetwork if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ManagedVirtualNetwork. + */ + public static ManagedVirtualNetwork fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedVirtualNetwork deserializedManagedVirtualNetwork = new ManagedVirtualNetwork(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("vNetId".equals(fieldName)) { + deserializedManagedVirtualNetwork.vNetId = reader.getString(); + } else if ("alias".equals(fieldName)) { + deserializedManagedVirtualNetwork.alias = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedManagedVirtualNetwork.additionalProperties = additionalProperties; + + return deserializedManagedVirtualNetwork; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java index 1c8f31c29320..a38a492f8af3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ManagedVirtualNetworkResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of managed Virtual Network resources. */ @Fluent -public final class ManagedVirtualNetworkListResponse { +public final class ManagedVirtualNetworkListResponse implements JsonSerializable { /* * List of managed Virtual Networks. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +91,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedVirtualNetworkListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedVirtualNetworkListResponse from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedVirtualNetworkListResponse if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedVirtualNetworkListResponse. + */ + public static ManagedVirtualNetworkListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedVirtualNetworkListResponse deserializedManagedVirtualNetworkListResponse + = new ManagedVirtualNetworkListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> ManagedVirtualNetworkResourceInner.fromJson(reader1)); + deserializedManagedVirtualNetworkListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedManagedVirtualNetworkListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedVirtualNetworkListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java index 8c02d9783204..63c408830b0b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReference.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Managed Virtual Network reference type. */ @Fluent -public final class ManagedVirtualNetworkReference { +public final class ManagedVirtualNetworkReference implements JsonSerializable { /* * Managed Virtual Network reference type. */ - @JsonProperty(value = "type", required = true) private ManagedVirtualNetworkReferenceType type; /* * Reference ManagedVirtualNetwork name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /** @@ -90,4 +92,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ManagedVirtualNetworkReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ManagedVirtualNetworkReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ManagedVirtualNetworkReference if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ManagedVirtualNetworkReference. 
+ */ + public static ManagedVirtualNetworkReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ManagedVirtualNetworkReference deserializedManagedVirtualNetworkReference + = new ManagedVirtualNetworkReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedManagedVirtualNetworkReference.type + = ManagedVirtualNetworkReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedManagedVirtualNetworkReference.referenceName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedManagedVirtualNetworkReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReferenceType.java index 080955edc143..19ee10d37a60 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ManagedVirtualNetworkReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -33,7 +32,6 @@ public ManagedVirtualNetworkReferenceType() { * @param name a name to look for. * @return the corresponding ManagedVirtualNetworkReferenceType. 
*/ - @JsonCreator public static ManagedVirtualNetworkReferenceType fromString(String name) { return fromString(name, ManagedVirtualNetworkReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMapping.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMapping.java index f00aad01555e..e0916ff6f765 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMapping.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMapping.java @@ -5,48 +5,46 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Source and target column mapping details. */ @Fluent -public final class MapperAttributeMapping { +public final class MapperAttributeMapping implements JsonSerializable { /* * Name of the target column. */ - @JsonProperty(value = "name") private String name; /* * Type of the CDC attribute mapping. Note: 'Advanced' mapping type is also saved as 'Derived'. */ - @JsonProperty(value = "type") private MappingType type; /* * Name of the function used for 'Aggregate' and 'Derived' (except 'Advanced') type mapping. */ - @JsonProperty(value = "functionName") private String functionName; /* * Expression used for 'Aggregate' and 'Derived' type mapping. */ - @JsonProperty(value = "expression") private String expression; /* * Reference of the source column used in the mapping. It is used for 'Direct' mapping type only. 
*/ - @JsonProperty(value = "attributeReference") private MapperAttributeReference attributeReference; /* * List of references for source columns. It is used for 'Derived' and 'Aggregate' type mappings only. */ - @JsonProperty(value = "attributeReferences") private List attributeReferences; /** @@ -196,4 +194,58 @@ public void validate() { attributeReferences().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("functionName", this.functionName); + jsonWriter.writeStringField("expression", this.expression); + jsonWriter.writeJsonField("attributeReference", this.attributeReference); + jsonWriter.writeArrayField("attributeReferences", this.attributeReferences, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperAttributeMapping from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperAttributeMapping if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperAttributeMapping. 
+ */ + public static MapperAttributeMapping fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperAttributeMapping deserializedMapperAttributeMapping = new MapperAttributeMapping(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMapperAttributeMapping.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedMapperAttributeMapping.type = MappingType.fromString(reader.getString()); + } else if ("functionName".equals(fieldName)) { + deserializedMapperAttributeMapping.functionName = reader.getString(); + } else if ("expression".equals(fieldName)) { + deserializedMapperAttributeMapping.expression = reader.getString(); + } else if ("attributeReference".equals(fieldName)) { + deserializedMapperAttributeMapping.attributeReference = MapperAttributeReference.fromJson(reader); + } else if ("attributeReferences".equals(fieldName)) { + List attributeReferences + = reader.readArray(reader1 -> MapperAttributeReference.fromJson(reader1)); + deserializedMapperAttributeMapping.attributeReferences = attributeReferences; + } else { + reader.skipChildren(); + } + } + + return deserializedMapperAttributeMapping; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMappings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMappings.java index 782e0997657e..528bb12b982b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMappings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeMappings.java @@ -5,18 +5,21 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Attribute mapping details. */ @Fluent -public final class MapperAttributeMappings { +public final class MapperAttributeMappings implements JsonSerializable { /* * List of attribute mappings. */ - @JsonProperty(value = "attributeMappings") private List attributeMappings; /** @@ -55,4 +58,43 @@ public void validate() { attributeMappings().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("attributeMappings", this.attributeMappings, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperAttributeMappings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperAttributeMappings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperAttributeMappings. 
+ */ + public static MapperAttributeMappings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperAttributeMappings deserializedMapperAttributeMappings = new MapperAttributeMappings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("attributeMappings".equals(fieldName)) { + List attributeMappings + = reader.readArray(reader1 -> MapperAttributeMapping.fromJson(reader1)); + deserializedMapperAttributeMappings.attributeMappings = attributeMappings; + } else { + reader.skipChildren(); + } + } + + return deserializedMapperAttributeMappings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeReference.java index c641c5af177b..e4060af947c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperAttributeReference.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Attribute reference details for the referred column. */ @Fluent -public final class MapperAttributeReference { +public final class MapperAttributeReference implements JsonSerializable { /* * Name of the column. */ - @JsonProperty(value = "name") private String name; /* * Name of the table. 
*/ - @JsonProperty(value = "entity") private String entity; /* * The connection reference for the connection. */ - @JsonProperty(value = "entityConnectionReference") private MapperConnectionReference entityConnectionReference; /** @@ -106,4 +107,47 @@ public void validate() { entityConnectionReference().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("entity", this.entity); + jsonWriter.writeJsonField("entityConnectionReference", this.entityConnectionReference); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperAttributeReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperAttributeReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperAttributeReference. 
+ */ + public static MapperAttributeReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperAttributeReference deserializedMapperAttributeReference = new MapperAttributeReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMapperAttributeReference.name = reader.getString(); + } else if ("entity".equals(fieldName)) { + deserializedMapperAttributeReference.entity = reader.getString(); + } else if ("entityConnectionReference".equals(fieldName)) { + deserializedMapperAttributeReference.entityConnectionReference + = MapperConnectionReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperAttributeReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java index 11eec772aebb..113dd7c30a9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnection.java @@ -6,43 +6,42 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Source connection details. */ @Fluent -public final class MapperConnection { +public final class MapperConnection implements JsonSerializable { /* * Linked service reference. 
*/ - @JsonProperty(value = "linkedService") private LinkedServiceReference linkedService; /* * Type of the linked service e.g.: AzureBlobFS. */ - @JsonProperty(value = "linkedServiceType") private String linkedServiceType; /* * Type of connection via linked service or dataset. */ - @JsonProperty(value = "type", required = true) private ConnectionType type; /* * A boolean indicating whether linked service is of type inline dataset. Currently only inline datasets are * supported. */ - @JsonProperty(value = "isInlineDataset") private Boolean isInlineDataset; /* * List of name/value pairs for connection properties. */ - @JsonProperty(value = "commonDslConnectorProperties") private List commonDslConnectorProperties; /** @@ -173,4 +172,56 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MapperConnection.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeJsonField("linkedService", this.linkedService); + jsonWriter.writeStringField("linkedServiceType", this.linkedServiceType); + jsonWriter.writeBooleanField("isInlineDataset", this.isInlineDataset); + jsonWriter.writeArrayField("commonDslConnectorProperties", this.commonDslConnectorProperties, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperConnection from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperConnection if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MapperConnection. 
+ */ + public static MapperConnection fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperConnection deserializedMapperConnection = new MapperConnection(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedMapperConnection.type = ConnectionType.fromString(reader.getString()); + } else if ("linkedService".equals(fieldName)) { + deserializedMapperConnection.linkedService = LinkedServiceReference.fromJson(reader); + } else if ("linkedServiceType".equals(fieldName)) { + deserializedMapperConnection.linkedServiceType = reader.getString(); + } else if ("isInlineDataset".equals(fieldName)) { + deserializedMapperConnection.isInlineDataset = reader.getNullable(JsonReader::getBoolean); + } else if ("commonDslConnectorProperties".equals(fieldName)) { + List commonDslConnectorProperties + = reader.readArray(reader1 -> MapperDslConnectorProperties.fromJson(reader1)); + deserializedMapperConnection.commonDslConnectorProperties = commonDslConnectorProperties; + } else { + reader.skipChildren(); + } + } + + return deserializedMapperConnection; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnectionReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnectionReference.java index ebd61b4ae6d3..dd525bd6b15f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnectionReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperConnectionReference.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Source or target connection reference details. */ @Fluent -public final class MapperConnectionReference { +public final class MapperConnectionReference implements JsonSerializable<MapperConnectionReference> { /* * Name of the connection */ - @JsonProperty(value = "connectionName") private String connectionName; /* * Type of connection via linked service or dataset. */ - @JsonProperty(value = "type") private ConnectionType type; /** @@ -77,4 +79,43 @@ public MapperConnectionReference withType(ConnectionType type) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectionName", this.connectionName); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperConnectionReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperConnectionReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperConnectionReference. 
+ */ + public static MapperConnectionReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperConnectionReference deserializedMapperConnectionReference = new MapperConnectionReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectionName".equals(fieldName)) { + deserializedMapperConnectionReference.connectionName = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedMapperConnectionReference.type = ConnectionType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperConnectionReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperDslConnectorProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperDslConnectorProperties.java index b09718740cfe..da21c692aa7a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperDslConnectorProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperDslConnectorProperties.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Connector properties of a CDC table in terms of name / value pairs. */ @Fluent -public final class MapperDslConnectorProperties { +public final class MapperDslConnectorProperties implements JsonSerializable { /* * Name of the property. 
*/ - @JsonProperty(value = "name") private String name; /* * Value of the property. */ - @JsonProperty(value = "value") private Object value; /** @@ -77,4 +79,43 @@ public MapperDslConnectorProperties withValue(Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperDslConnectorProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperDslConnectorProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperDslConnectorProperties. + */ + public static MapperDslConnectorProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperDslConnectorProperties deserializedMapperDslConnectorProperties = new MapperDslConnectorProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMapperDslConnectorProperties.name = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedMapperDslConnectorProperties.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperDslConnectorProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicy.java index e4d62a15714a..fe5c542970b4 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicy.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * CDC Policy. */ @Fluent -public final class MapperPolicy { +public final class MapperPolicy implements JsonSerializable { /* * Mode of running the CDC: batch vs continuous. */ - @JsonProperty(value = "mode") private String mode; /* * Defines the frequency and interval for running the CDC for batch mode. */ - @JsonProperty(value = "recurrence") private MapperPolicyRecurrence recurrence; /** @@ -80,4 +82,43 @@ public void validate() { recurrence().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("mode", this.mode); + jsonWriter.writeJsonField("recurrence", this.recurrence); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperPolicy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperPolicy. 
+ */ + public static MapperPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperPolicy deserializedMapperPolicy = new MapperPolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("mode".equals(fieldName)) { + deserializedMapperPolicy.mode = reader.getString(); + } else if ("recurrence".equals(fieldName)) { + deserializedMapperPolicy.recurrence = MapperPolicyRecurrence.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicyRecurrence.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicyRecurrence.java index 693f2cd3d4cf..3e38672ee8ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicyRecurrence.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperPolicyRecurrence.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * CDC policy recurrence details. */ @Fluent -public final class MapperPolicyRecurrence { +public final class MapperPolicyRecurrence implements JsonSerializable { /* * Frequency of period in terms of 'Hour', 'Minute' or 'Second'. */ - @JsonProperty(value = "frequency") private FrequencyType frequency; /* * Actual interval value as per chosen frequency. 
*/ - @JsonProperty(value = "interval") private Integer interval; /** @@ -77,4 +79,43 @@ public MapperPolicyRecurrence withInterval(Integer interval) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("frequency", this.frequency == null ? null : this.frequency.toString()); + jsonWriter.writeNumberField("interval", this.interval); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperPolicyRecurrence from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperPolicyRecurrence if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperPolicyRecurrence. + */ + public static MapperPolicyRecurrence fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperPolicyRecurrence deserializedMapperPolicyRecurrence = new MapperPolicyRecurrence(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("frequency".equals(fieldName)) { + deserializedMapperPolicyRecurrence.frequency = FrequencyType.fromString(reader.getString()); + } else if ("interval".equals(fieldName)) { + deserializedMapperPolicyRecurrence.interval = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperPolicyRecurrence; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperSourceConnectionsInfo.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperSourceConnectionsInfo.java index 28879227a5e0..326fcf0f0f09 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperSourceConnectionsInfo.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperSourceConnectionsInfo.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A object which contains list of tables and connection details for a source connection. */ @Fluent -public final class MapperSourceConnectionsInfo { +public final class MapperSourceConnectionsInfo implements JsonSerializable { /* * List of source tables for a source connection. */ - @JsonProperty(value = "sourceEntities") private List sourceEntities; /* * Source connection details. */ - @JsonProperty(value = "connection") private MapperConnection connection; /** @@ -84,4 +86,45 @@ public void validate() { connection().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("sourceEntities", this.sourceEntities, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("connection", this.connection); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperSourceConnectionsInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperSourceConnectionsInfo if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperSourceConnectionsInfo. 
+ */ + public static MapperSourceConnectionsInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperSourceConnectionsInfo deserializedMapperSourceConnectionsInfo = new MapperSourceConnectionsInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceEntities".equals(fieldName)) { + List sourceEntities = reader.readArray(reader1 -> MapperTable.fromJson(reader1)); + deserializedMapperSourceConnectionsInfo.sourceEntities = sourceEntities; + } else if ("connection".equals(fieldName)) { + deserializedMapperSourceConnectionsInfo.connection = MapperConnection.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperSourceConnectionsInfo; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTable.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTable.java index 4cecaa08ee88..f70553e31cf0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTable.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTable.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MapperTableProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * CDC table details. */ @Fluent -public final class MapperTable { +public final class MapperTable implements JsonSerializable { /* * Name of the table. 
*/ - @JsonProperty(value = "name") private String name; /* * Table properties. */ - @JsonProperty(value = "properties") private MapperTableProperties innerProperties; /** @@ -117,4 +119,43 @@ public void validate() { innerProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeJsonField("properties", this.innerProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperTable from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperTable if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperTable. + */ + public static MapperTable fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperTable deserializedMapperTable = new MapperTable(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMapperTable.name = reader.getString(); + } else if ("properties".equals(fieldName)) { + deserializedMapperTable.innerProperties = MapperTableProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperTable; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTableSchema.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTableSchema.java index 59c2146215a1..c91e10e41f92 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTableSchema.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTableSchema.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Schema of a CDC table in terms of column names and their corresponding data types. */ @Fluent -public final class MapperTableSchema { +public final class MapperTableSchema implements JsonSerializable { /* * Name of the column. */ - @JsonProperty(value = "name") private String name; /* * Data type of the column. */ - @JsonProperty(value = "dataType") private String dataType; /** @@ -77,4 +79,43 @@ public MapperTableSchema withDataType(String dataType) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("dataType", this.dataType); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperTableSchema from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperTableSchema if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperTableSchema. 
+ */ + public static MapperTableSchema fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperTableSchema deserializedMapperTableSchema = new MapperTableSchema(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMapperTableSchema.name = reader.getString(); + } else if ("dataType".equals(fieldName)) { + deserializedMapperTableSchema.dataType = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedMapperTableSchema; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTargetConnectionsInfo.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTargetConnectionsInfo.java index 6e3c283ca0bc..5de95ae8d694 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTargetConnectionsInfo.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MapperTargetConnectionsInfo.java @@ -5,36 +5,36 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * A object which contains list of tables and connection details for a target connection. */ @Fluent -public final class MapperTargetConnectionsInfo { +public final class MapperTargetConnectionsInfo implements JsonSerializable { /* * List of source tables for a target connection. 
*/ - @JsonProperty(value = "targetEntities") private List<MapperTable> targetEntities; /* * Source connection details. */ - @JsonProperty(value = "connection") private MapperConnection connection; /* * List of table mappings. */ - @JsonProperty(value = "dataMapperMappings") private List<DataMapperMapping> dataMapperMappings; /* * List of relationship info among the tables. */ - @JsonProperty(value = "relationships") private List<Object> relationships; /** @@ -139,4 +139,56 @@ public void validate() { dataMapperMappings().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("targetEntities", this.targetEntities, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("connection", this.connection); + jsonWriter.writeArrayField("dataMapperMappings", this.dataMapperMappings, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("relationships", this.relationships, + (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MapperTargetConnectionsInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MapperTargetConnectionsInfo if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MapperTargetConnectionsInfo. 
+ */ + public static MapperTargetConnectionsInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MapperTargetConnectionsInfo deserializedMapperTargetConnectionsInfo = new MapperTargetConnectionsInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("targetEntities".equals(fieldName)) { + List<MapperTable> targetEntities = reader.readArray(reader1 -> MapperTable.fromJson(reader1)); + deserializedMapperTargetConnectionsInfo.targetEntities = targetEntities; + } else if ("connection".equals(fieldName)) { + deserializedMapperTargetConnectionsInfo.connection = MapperConnection.fromJson(reader); + } else if ("dataMapperMappings".equals(fieldName)) { + List<DataMapperMapping> dataMapperMappings + = reader.readArray(reader1 -> DataMapperMapping.fromJson(reader1)); + deserializedMapperTargetConnectionsInfo.dataMapperMappings = dataMapperMappings; + } else if ("relationships".equals(fieldName)) { + List<Object> relationships = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMapperTargetConnectionsInfo.relationships = relationships; + } else { + reader.skipChildren(); + } + } + + return deserializedMapperTargetConnectionsInfo; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java index b3a0f8de2396..7e7bb640877f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingDataFlow.java @@ -5,31 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MappingDataFlowTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.util.List; /** * Mapping data flow. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MappingDataFlow.class, visible = true) -@JsonTypeName("MappingDataFlow") @Fluent public final class MappingDataFlow extends DataFlow { /* * Type of data flow. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MappingDataFlow"; /* * Mapping data flow type properties. */ - @JsonProperty(value = "typeProperties") private MappingDataFlowTypeProperties innerTypeProperties; /** @@ -211,4 +206,53 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MappingDataFlow from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MappingDataFlow if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MappingDataFlow. 
+ */ + public static MappingDataFlow fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MappingDataFlow deserializedMappingDataFlow = new MappingDataFlow(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedMappingDataFlow.withDescription(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMappingDataFlow.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMappingDataFlow.withFolder(DataFlowFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMappingDataFlow.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMappingDataFlow.innerTypeProperties = MappingDataFlowTypeProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedMappingDataFlow; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingType.java index ae57abfe7cdd..42acd98fb456 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MappingType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public MappingType() { * @param name a name to look for. * @return the corresponding MappingType. 
*/ - @JsonCreator public static MappingType fromString(String name) { return fromString(name, MappingType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java index 79d5e2891448..e3121b987787 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MariaDBLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * MariaDB server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBLinkedService.class, visible = true) -@JsonTypeName("MariaDB") @Fluent public final class MariaDBLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MariaDB"; /* * MariaDB server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MariaDBLinkedServiceTypeProperties innerTypeProperties = new MariaDBLinkedServiceTypeProperties(); /** @@ -303,4 +299,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MariaDBLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MariaDBLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MariaDBLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MariaDBLinkedService. 
+ */ + public static MariaDBLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MariaDBLinkedService deserializedMariaDBLinkedService = new MariaDBLinkedService(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMariaDBLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMariaDBLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMariaDBLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMariaDBLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMariaDBLinkedService.innerTypeProperties + = MariaDBLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMariaDBLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMariaDBLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMariaDBLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java index 1eda874fde68..84e499d9497e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity MariaDB server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBSource.class, visible = true) -@JsonTypeName("MariaDBSource") @Fluent public final class MariaDBSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MariaDBSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public MariaDBSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MariaDBSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MariaDBSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MariaDBSource. 
+ */ + public static MariaDBSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MariaDBSource deserializedMariaDBSource = new MariaDBSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMariaDBSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMariaDBSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMariaDBSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMariaDBSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMariaDBSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMariaDBSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMariaDBSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMariaDBSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMariaDBSource.withAdditionalProperties(additionalProperties); + + return deserializedMariaDBSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java index 6f6259306d3f..0f81027ff50d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MariaDBTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * MariaDB server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MariaDBTableDataset.class, visible = true) -@JsonTypeName("MariaDBTable") @Fluent public final class MariaDBTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MariaDBTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MariaDBTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MariaDBTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MariaDBTableDataset. 
+ */ + public static MariaDBTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MariaDBTableDataset deserializedMariaDBTableDataset = new MariaDBTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMariaDBTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMariaDBTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMariaDBTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMariaDBTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMariaDBTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMariaDBTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMariaDBTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMariaDBTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMariaDBTableDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMariaDBTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedMariaDBTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java index e78ce562b1ca..4f28dcf64f78 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MarketoLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Marketo server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoLinkedService.class, visible = true) -@JsonTypeName("Marketo") @Fluent public final class MarketoLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Marketo"; /* * Marketo server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MarketoLinkedServiceTypeProperties innerTypeProperties = new MarketoLinkedServiceTypeProperties(); /** @@ -282,4 +278,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MarketoLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MarketoLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MarketoLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MarketoLinkedService. 
+ */ + public static MarketoLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MarketoLinkedService deserializedMarketoLinkedService = new MarketoLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMarketoLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMarketoLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMarketoLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMarketoLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMarketoLinkedService.innerTypeProperties + = MarketoLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMarketoLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMarketoLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMarketoLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java index 8e832754bdc0..ddfd184d69f7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Marketo server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoObjectDataset.class, visible = true) -@JsonTypeName("MarketoObject") @Fluent public final class MarketoObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MarketoObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MarketoObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MarketoObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MarketoObjectDataset. 
+ */ + public static MarketoObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MarketoObjectDataset deserializedMarketoObjectDataset = new MarketoObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMarketoObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMarketoObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMarketoObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMarketoObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMarketoObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMarketoObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMarketoObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMarketoObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMarketoObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMarketoObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedMarketoObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java index 86b376ee7b0d..0cd2893b3f31 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MarketoSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Marketo server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MarketoSource.class, visible = true) -@JsonTypeName("MarketoSource") @Fluent public final class MarketoSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MarketoSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public MarketoSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MarketoSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MarketoSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MarketoSource. 
+ */ + public static MarketoSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MarketoSource deserializedMarketoSource = new MarketoSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMarketoSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMarketoSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMarketoSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMarketoSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMarketoSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMarketoSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMarketoSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMarketoSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMarketoSource.withAdditionalProperties(additionalProperties); + + return deserializedMarketoSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MetadataItem.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MetadataItem.java index f05e491455ea..d9cd07d94afe 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MetadataItem.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MetadataItem.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Specify the name and value of custom metadata item. */ @Fluent -public final class MetadataItem { +public final class MetadataItem implements JsonSerializable { /* * Metadata item key name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "name") private Object name; /* * Metadata item value. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value") private Object value; /** @@ -77,4 +79,43 @@ public MetadataItem withValue(Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("name", this.name); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MetadataItem from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MetadataItem if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MetadataItem. 
+ */ + public static MetadataItem fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MetadataItem deserializedMetadataItem = new MetadataItem(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedMetadataItem.name = reader.readUntyped(); + } else if ("value".equals(fieldName)) { + deserializedMetadataItem.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedMetadataItem; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java index a3725e92617e..5ec16755908c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Access linked service. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MicrosoftAccessLinkedService.class, - visible = true) -@JsonTypeName("MicrosoftAccess") @Fluent public final class MicrosoftAccessLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MicrosoftAccess"; /* * Microsoft Access linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private MicrosoftAccessLinkedServiceTypeProperties innerTypeProperties = new MicrosoftAccessLinkedServiceTypeProperties(); @@ -268,4 +260,72 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MicrosoftAccessLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessLinkedService if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MicrosoftAccessLinkedService. + */ + public static MicrosoftAccessLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessLinkedService deserializedMicrosoftAccessLinkedService = new MicrosoftAccessLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMicrosoftAccessLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMicrosoftAccessLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMicrosoftAccessLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMicrosoftAccessLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMicrosoftAccessLinkedService.innerTypeProperties + = MicrosoftAccessLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMicrosoftAccessLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMicrosoftAccessLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMicrosoftAccessLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java index 8297cdd04224..7267820fff09 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Microsoft Access sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MicrosoftAccessSink.class, visible = true) -@JsonTypeName("MicrosoftAccessSink") @Fluent public final class MicrosoftAccessSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MicrosoftAccessSink"; /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -131,4 +128,72 @@ public MicrosoftAccessSink withDisableMetricsCollection(Object disableMetricsCol public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MicrosoftAccessSink. 
+ */ + public static MicrosoftAccessSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessSink deserializedMicrosoftAccessSink = new MicrosoftAccessSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedMicrosoftAccessSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedMicrosoftAccessSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedMicrosoftAccessSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedMicrosoftAccessSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMicrosoftAccessSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMicrosoftAccessSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMicrosoftAccessSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedMicrosoftAccessSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMicrosoftAccessSink.withAdditionalProperties(additionalProperties); + + return deserializedMicrosoftAccessSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java 
index 5b96e987075a..032f2c145358 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Microsoft Access. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MicrosoftAccessSource.class, visible = true) -@JsonTypeName("MicrosoftAccessSource") @Fluent public final class MicrosoftAccessSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MicrosoftAccessSource"; /* * Database query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -140,4 +136,69 @@ public MicrosoftAccessSource withDisableMetricsCollection(Object disableMetricsC public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessSource if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the MicrosoftAccessSource. 
+ */ + public static MicrosoftAccessSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessSource deserializedMicrosoftAccessSource = new MicrosoftAccessSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMicrosoftAccessSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMicrosoftAccessSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMicrosoftAccessSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMicrosoftAccessSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMicrosoftAccessSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMicrosoftAccessSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMicrosoftAccessSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMicrosoftAccessSource.withAdditionalProperties(additionalProperties); + + return deserializedMicrosoftAccessSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java index 534f439af58d..7827ebec03de 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MicrosoftAccessTableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Microsoft Access table dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MicrosoftAccessTableDataset.class, - visible = true) -@JsonTypeName("MicrosoftAccessTable") @Fluent public final class MicrosoftAccessTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MicrosoftAccessTable"; /* * Microsoft Access table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private MicrosoftAccessTableDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MicrosoftAccessTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MicrosoftAccessTableDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MicrosoftAccessTableDataset. 
+ */ + public static MicrosoftAccessTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MicrosoftAccessTableDataset deserializedMicrosoftAccessTableDataset = new MicrosoftAccessTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMicrosoftAccessTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMicrosoftAccessTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMicrosoftAccessTableDataset.innerTypeProperties + = MicrosoftAccessTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedMicrosoftAccessTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedMicrosoftAccessTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java index 0bee1cc82a1a..1a4bc0e80c14 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasCollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasCollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The MongoDB Atlas database dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MongoDbAtlasCollectionDataset.class, - visible = true) -@JsonTypeName("MongoDbAtlasCollection") @Fluent public final class MongoDbAtlasCollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbAtlasCollection"; /* * MongoDB Atlas database dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MongoDbAtlasCollectionDatasetTypeProperties innerTypeProperties = new MongoDbAtlasCollectionDatasetTypeProperties(); @@ -170,4 +162,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbAtlasCollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasCollectionDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasCollectionDataset if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbAtlasCollectionDataset. 
+ */ + public static MongoDbAtlasCollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasCollectionDataset deserializedMongoDbAtlasCollectionDataset + = new MongoDbAtlasCollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbAtlasCollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMongoDbAtlasCollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.innerTypeProperties + = MongoDbAtlasCollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbAtlasCollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedMongoDbAtlasCollectionDataset.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbAtlasCollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java index a4eff9abf654..0fd4e3dcd7f5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for MongoDB Atlas data source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MongoDbAtlasLinkedService.class, - visible = true) -@JsonTypeName("MongoDbAtlas") @Fluent public final class MongoDbAtlasLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbAtlas"; /* * MongoDB Atlas linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MongoDbAtlasLinkedServiceTypeProperties innerTypeProperties = new MongoDbAtlasLinkedServiceTypeProperties(); /** @@ -192,4 +184,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbAtlasLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbAtlasLinkedService. 
+ */ + public static MongoDbAtlasLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasLinkedService deserializedMongoDbAtlasLinkedService = new MongoDbAtlasLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMongoDbAtlasLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbAtlasLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbAtlasLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMongoDbAtlasLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbAtlasLinkedService.innerTypeProperties + = MongoDbAtlasLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbAtlasLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbAtlasLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbAtlasLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java index 2f0f217aa3df..92831188e337 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSink.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity MongoDB Atlas sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbAtlasSink.class, visible = true) -@JsonTypeName("MongoDbAtlasSink") @Fluent public final class MongoDbAtlasSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbAtlasSink"; /* @@ -29,7 +27,6 @@ public final class MongoDbAtlasSink extends CopySink { * default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -135,4 +132,72 @@ public MongoDbAtlasSink withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbAtlasSink. 
+ */ + public static MongoDbAtlasSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasSink deserializedMongoDbAtlasSink = new MongoDbAtlasSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedMongoDbAtlasSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedMongoDbAtlasSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedMongoDbAtlasSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedMongoDbAtlasSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMongoDbAtlasSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMongoDbAtlasSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMongoDbAtlasSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedMongoDbAtlasSink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbAtlasSink.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbAtlasSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java index 6b347acc04cb..3155546c2e54 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAtlasSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for a MongoDB Atlas database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbAtlasSource.class, visible = true) -@JsonTypeName("MongoDbAtlasSource") @Fluent public final class MongoDbAtlasSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbAtlasSource"; /* * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or * pass an empty document ({}). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "filter") private Object filter; /* * Cursor methods for Mongodb query */ - @JsonProperty(value = "cursorMethods") private MongoDbCursorMethodsProperties cursorMethods; /* @@ -42,21 +38,18 @@ public final class MongoDbAtlasSource extends CopySource { * cases, modifying the batch size will not affect the user or the application. This property's main purpose is to * avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "batchSize") private Object batchSize; /* * Query timeout. 
Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -237,4 +230,78 @@ public void validate() { cursorMethods().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("filter", this.filter); + jsonWriter.writeJsonField("cursorMethods", this.cursorMethods); + jsonWriter.writeUntypedField("batchSize", this.batchSize); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbAtlasSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbAtlasSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbAtlasSource. 
+ */ + public static MongoDbAtlasSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbAtlasSource deserializedMongoDbAtlasSource = new MongoDbAtlasSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMongoDbAtlasSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMongoDbAtlasSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMongoDbAtlasSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMongoDbAtlasSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMongoDbAtlasSource.type = reader.getString(); + } else if ("filter".equals(fieldName)) { + deserializedMongoDbAtlasSource.filter = reader.readUntyped(); + } else if ("cursorMethods".equals(fieldName)) { + deserializedMongoDbAtlasSource.cursorMethods = MongoDbCursorMethodsProperties.fromJson(reader); + } else if ("batchSize".equals(fieldName)) { + deserializedMongoDbAtlasSource.batchSize = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMongoDbAtlasSource.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMongoDbAtlasSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbAtlasSource.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbAtlasSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAuthenticationType.java index dd78d8fce4ce..29f89c6f5fad 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public MongoDbAuthenticationType() { * @param name a name to look for. * @return the corresponding MongoDbAuthenticationType. */ - @JsonCreator public static MongoDbAuthenticationType fromString(String name) { return fromString(name, MongoDbAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java index 20964d323d9c..4d6a0f90edeb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.MongoDbCollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The MongoDB database dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MongoDbCollectionDataset.class, - visible = true) -@JsonTypeName("MongoDbCollection") @Fluent public final class MongoDbCollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbCollection"; /* * MongoDB database dataset properties. */ - @JsonProperty(value = "typeProperties", required = true) private MongoDbCollectionDatasetTypeProperties innerTypeProperties = new MongoDbCollectionDatasetTypeProperties(); /** @@ -169,4 +161,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbCollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != 
null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbCollectionDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbCollectionDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbCollectionDataset. + */ + public static MongoDbCollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbCollectionDataset deserializedMongoDbCollectionDataset = new MongoDbCollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMongoDbCollectionDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbCollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMongoDbCollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMongoDbCollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbCollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + 
deserializedMongoDbCollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMongoDbCollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbCollectionDataset.innerTypeProperties + = MongoDbCollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbCollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbCollectionDataset.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbCollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java index 5ce034886e85..55a98a0fc0c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbCursorMethodsProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; 
@@ -16,39 +17,34 @@ * Cursor methods for Mongodb query. */ @Fluent -public final class MongoDbCursorMethodsProperties { +public final class MongoDbCursorMethodsProperties implements JsonSerializable { /* * Specifies the fields to return in the documents that match the query filter. To return all fields in the matching * documents, omit this parameter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "project") private Object project; /* * Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType * string). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sort") private Object sort; /* * Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful * in implementing paginated results. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "skip") private Object skip; /* * Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a * SQL database. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "limit") private Object limit; /* * Cursor methods for Mongodb query */ - @JsonIgnore private Map additionalProperties; /** @@ -150,7 +146,6 @@ public MongoDbCursorMethodsProperties withLimit(Object limit) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -166,14 +161,6 @@ public MongoDbCursorMethodsProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -181,4 +168,61 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("project", this.project); + jsonWriter.writeUntypedField("sort", this.sort); + jsonWriter.writeUntypedField("skip", this.skip); + jsonWriter.writeUntypedField("limit", this.limit); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbCursorMethodsProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbCursorMethodsProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbCursorMethodsProperties. 
+ */ + public static MongoDbCursorMethodsProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbCursorMethodsProperties deserializedMongoDbCursorMethodsProperties + = new MongoDbCursorMethodsProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("project".equals(fieldName)) { + deserializedMongoDbCursorMethodsProperties.project = reader.readUntyped(); + } else if ("sort".equals(fieldName)) { + deserializedMongoDbCursorMethodsProperties.sort = reader.readUntyped(); + } else if ("skip".equals(fieldName)) { + deserializedMongoDbCursorMethodsProperties.skip = reader.readUntyped(); + } else if ("limit".equals(fieldName)) { + deserializedMongoDbCursorMethodsProperties.limit = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbCursorMethodsProperties.additionalProperties = additionalProperties; + + return deserializedMongoDbCursorMethodsProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java index 51587da71577..cb8f1d429666 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; 
+import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for MongoDb data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbLinkedService.class, visible = true) -@JsonTypeName("MongoDb") @Fluent public final class MongoDbLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDb"; /* * MongoDB linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private MongoDbLinkedServiceTypeProperties innerTypeProperties = new MongoDbLinkedServiceTypeProperties(); /** @@ -357,4 +353,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), 
additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbLinkedService. + */ + public static MongoDbLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbLinkedService deserializedMongoDbLinkedService = new MongoDbLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMongoDbLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMongoDbLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbLinkedService.innerTypeProperties + = MongoDbLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + 
} + } + deserializedMongoDbLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java index 19dddd309ad0..195646656083 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for a MongoDB database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbSource.class, visible = true) -@JsonTypeName("MongoDbSource") @Fluent public final class MongoDbSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbSource"; /* * Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -142,4 +138,69 @@ public MongoDbSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbSource. 
+ */ + public static MongoDbSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbSource deserializedMongoDbSource = new MongoDbSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMongoDbSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMongoDbSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMongoDbSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMongoDbSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMongoDbSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMongoDbSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMongoDbSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbSource.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java index cf5882125101..2f8fcd0feb7e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2CollectionDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2CollectionDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The MongoDB database dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MongoDbV2CollectionDataset.class, - visible = true) -@JsonTypeName("MongoDbV2Collection") @Fluent public final class MongoDbV2CollectionDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbV2Collection"; /* * MongoDB database dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MongoDbV2CollectionDatasetTypeProperties innerTypeProperties = new MongoDbV2CollectionDatasetTypeProperties(); @@ -170,4 +162,81 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbV2CollectionDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2CollectionDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbV2CollectionDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbV2CollectionDataset. 
+ */ + public static MongoDbV2CollectionDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2CollectionDataset deserializedMongoDbV2CollectionDataset = new MongoDbV2CollectionDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbV2CollectionDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMongoDbV2CollectionDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.innerTypeProperties + = MongoDbV2CollectionDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbV2CollectionDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbV2CollectionDataset.withAdditionalProperties(additionalProperties); 
+ + return deserializedMongoDbV2CollectionDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java index e33513aa9588..7c3574948280 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2LinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for MongoDB data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2LinkedService.class, visible = true) -@JsonTypeName("MongoDbV2") @Fluent public final class MongoDbV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbV2"; /* * MongoDB linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private MongoDbV2LinkedServiceTypeProperties innerTypeProperties = new MongoDbV2LinkedServiceTypeProperties(); /** @@ -163,4 +159,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MongoDbV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbV2LinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MongoDbV2LinkedService. 
+ */ + public static MongoDbV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2LinkedService deserializedMongoDbV2LinkedService = new MongoDbV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMongoDbV2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMongoDbV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMongoDbV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMongoDbV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMongoDbV2LinkedService.innerTypeProperties + = MongoDbV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMongoDbV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java index bf5116d087cb..a5fae7f2a54c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Sink.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity MongoDB sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2Sink.class, visible = true) -@JsonTypeName("MongoDbV2Sink") @Fluent public final class MongoDbV2Sink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbV2Sink"; /* @@ -29,7 +27,6 @@ public final class MongoDbV2Sink extends CopySink { * default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -135,4 +132,72 @@ public MongoDbV2Sink withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2Sink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbV2Sink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbV2Sink. 
+ */ + public static MongoDbV2Sink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2Sink deserializedMongoDbV2Sink = new MongoDbV2Sink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedMongoDbV2Sink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedMongoDbV2Sink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedMongoDbV2Sink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedMongoDbV2Sink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMongoDbV2Sink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMongoDbV2Sink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMongoDbV2Sink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedMongoDbV2Sink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbV2Sink.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbV2Sink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java index c1bc187009f9..9799a1ffa237 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MongoDbV2Source.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for a MongoDB database. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MongoDbV2Source.class, visible = true) -@JsonTypeName("MongoDbV2Source") @Fluent public final class MongoDbV2Source extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MongoDbV2Source"; /* * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or * pass an empty document ({}). Type: string (or Expression with resultType string). */ - @JsonProperty(value = "filter") private Object filter; /* * Cursor methods for Mongodb query */ - @JsonProperty(value = "cursorMethods") private MongoDbCursorMethodsProperties cursorMethods; /* @@ -42,21 +38,18 @@ public final class MongoDbV2Source extends CopySource { * modifying the batch size will not affect the user or the application. This property's main purpose is to avoid * hit the limitation of response size. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "batchSize") private Object batchSize; /* * Query timeout. 
Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -237,4 +230,78 @@ public void validate() { cursorMethods().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("filter", this.filter); + jsonWriter.writeJsonField("cursorMethods", this.cursorMethods); + jsonWriter.writeUntypedField("batchSize", this.batchSize); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MongoDbV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MongoDbV2Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MongoDbV2Source. 
+ */ + public static MongoDbV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MongoDbV2Source deserializedMongoDbV2Source = new MongoDbV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMongoDbV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMongoDbV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMongoDbV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMongoDbV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMongoDbV2Source.type = reader.getString(); + } else if ("filter".equals(fieldName)) { + deserializedMongoDbV2Source.filter = reader.readUntyped(); + } else if ("cursorMethods".equals(fieldName)) { + deserializedMongoDbV2Source.cursorMethods = MongoDbCursorMethodsProperties.fromJson(reader); + } else if ("batchSize".equals(fieldName)) { + deserializedMongoDbV2Source.batchSize = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMongoDbV2Source.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMongoDbV2Source.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMongoDbV2Source.withAdditionalProperties(additionalProperties); + + return deserializedMongoDbV2Source; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java index ad52f9cf605a..509fce7263c6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MultiplePipelineTrigger.java @@ -5,42 +5,34 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Base class for all triggers that support one to many model for trigger to pipeline. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = MultiplePipelineTrigger.class, - visible = true) -@JsonTypeName("MultiplePipelineTrigger") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "ScheduleTrigger", value = ScheduleTrigger.class), - @JsonSubTypes.Type(name = "BlobTrigger", value = BlobTrigger.class), - @JsonSubTypes.Type(name = "BlobEventsTrigger", value = BlobEventsTrigger.class), - @JsonSubTypes.Type(name = "CustomEventsTrigger", value = CustomEventsTrigger.class) }) @Fluent public class MultiplePipelineTrigger extends Trigger { /* * Trigger type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MultiplePipelineTrigger"; /* * Pipelines that need to be started. */ - @JsonProperty(value = "pipelines") private List pipelines; + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of MultiplePipelineTrigger class. */ @@ -77,6 +69,17 @@ public MultiplePipelineTrigger withPipelines(List pipe return this; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. + */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -107,4 +110,97 @@ public void validate() { pipelines().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeArrayField("pipelines", this.pipelines, (writer, element) -> writer.writeJson(element)); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MultiplePipelineTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MultiplePipelineTrigger if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the MultiplePipelineTrigger. + */ + public static MultiplePipelineTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("ScheduleTrigger".equals(discriminatorValue)) { + return ScheduleTrigger.fromJson(readerToUse.reset()); + } else if ("BlobTrigger".equals(discriminatorValue)) { + return BlobTrigger.fromJson(readerToUse.reset()); + } else if ("BlobEventsTrigger".equals(discriminatorValue)) { + return BlobEventsTrigger.fromJson(readerToUse.reset()); + } else if ("CustomEventsTrigger".equals(discriminatorValue)) { + return CustomEventsTrigger.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static MultiplePipelineTrigger fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MultiplePipelineTrigger deserializedMultiplePipelineTrigger = new MultiplePipelineTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedMultiplePipelineTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedMultiplePipelineTrigger.runtimeState + = TriggerRuntimeState.fromString(reader.getString()); + } else if 
("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMultiplePipelineTrigger.withAnnotations(annotations); + } else if ("type".equals(fieldName)) { + deserializedMultiplePipelineTrigger.type = reader.getString(); + } else if ("pipelines".equals(fieldName)) { + List pipelines + = reader.readArray(reader1 -> TriggerPipelineReference.fromJson(reader1)); + deserializedMultiplePipelineTrigger.pipelines = pipelines; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMultiplePipelineTrigger.withAdditionalProperties(additionalProperties); + + return deserializedMultiplePipelineTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java index c18fbd2bb835..566aff0c418d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MySqlLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import 
java.util.List; import java.util.Map; /** * Linked service for MySQL data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlLinkedService.class, visible = true) -@JsonTypeName("MySql") @Fluent public final class MySqlLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MySql"; /* * MySQL linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private MySqlLinkedServiceTypeProperties innerTypeProperties = new MySqlLinkedServiceTypeProperties(); /** @@ -353,4 +349,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(MySqlLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MySqlLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MySqlLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the MySqlLinkedService. + */ + public static MySqlLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MySqlLinkedService deserializedMySqlLinkedService = new MySqlLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedMySqlLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMySqlLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMySqlLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMySqlLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedMySqlLinkedService.innerTypeProperties + = MySqlLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedMySqlLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMySqlLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedMySqlLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java index 0f5be970a130..f93d0564957d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for MySQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlSource.class, visible = true) -@JsonTypeName("MySqlSource") @Fluent public final class MySqlSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MySqlSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public MySqlSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MySqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MySqlSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the MySqlSource. 
+ */ + public static MySqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MySqlSource deserializedMySqlSource = new MySqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedMySqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedMySqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedMySqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedMySqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedMySqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedMySqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedMySqlSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedMySqlSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMySqlSource.withAdditionalProperties(additionalProperties); + + return deserializedMySqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java index ac005be1e65d..01f749543ffe 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/MySqlTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.MySqlTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The MySQL table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = MySqlTableDataset.class, visible = true) -@JsonTypeName("MySqlTable") @Fluent public final class MySqlTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "MySqlTable"; /* * MySQL table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private MySqlTableDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of MySqlTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of MySqlTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the MySqlTableDataset. 
+ */ + public static MySqlTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + MySqlTableDataset deserializedMySqlTableDataset = new MySqlTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedMySqlTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedMySqlTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedMySqlTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedMySqlTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedMySqlTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedMySqlTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedMySqlTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedMySqlTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedMySqlTableDataset.innerTypeProperties + = MySqlTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedMySqlTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedMySqlTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java index 2f962b0fbb02..a6868faa32f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.NetezzaLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Netezza linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaLinkedService.class, visible = true) -@JsonTypeName("Netezza") @Fluent public final class NetezzaLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Netezza"; /* * Netezza linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private NetezzaLinkedServiceTypeProperties innerTypeProperties = new NetezzaLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(NetezzaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the NetezzaLinkedService. 
+ */ + public static NetezzaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaLinkedService deserializedNetezzaLinkedService = new NetezzaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedNetezzaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedNetezzaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedNetezzaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedNetezzaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedNetezzaLinkedService.innerTypeProperties + = NetezzaLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedNetezzaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedNetezzaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedNetezzaLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java index 67f86c000314..e064a45a5b89 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaPartitionSettings.java @@ -5,32 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for Netezza source partitioning. */ @Fluent -public final class NetezzaPartitionSettings { +public final class NetezzaPartitionSettings implements JsonSerializable { /* * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /** @@ -112,4 +113,46 @@ public NetezzaPartitionSettings withPartitionLowerBound(Object partitionLowerBou */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaPartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaPartitionSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the NetezzaPartitionSettings. 
+ */ + public static NetezzaPartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaPartitionSettings deserializedNetezzaPartitionSettings = new NetezzaPartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionColumnName".equals(fieldName)) { + deserializedNetezzaPartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedNetezzaPartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedNetezzaPartitionSettings.partitionLowerBound = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedNetezzaPartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java index c8a928ac338e..21000ebc81ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity 
Netezza source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaSource.class, visible = true) -@JsonTypeName("NetezzaSource") @Fluent public final class NetezzaSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "NetezzaSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", * "DataSlice", "DynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Netezza source partitioning. */ - @JsonProperty(value = "partitionSettings") private NetezzaPartitionSettings partitionSettings; /** @@ -189,4 +184,78 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the NetezzaSource. + */ + public static NetezzaSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaSource deserializedNetezzaSource = new NetezzaSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedNetezzaSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedNetezzaSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedNetezzaSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedNetezzaSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedNetezzaSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedNetezzaSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedNetezzaSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedNetezzaSource.query = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedNetezzaSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + 
deserializedNetezzaSource.partitionSettings = NetezzaPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedNetezzaSource.withAdditionalProperties(additionalProperties); + + return deserializedNetezzaSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java index 6c994cf4627f..02d0d62102a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NetezzaTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.NetezzaTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Netezza dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NetezzaTableDataset.class, visible = true) -@JsonTypeName("NetezzaTable") @Fluent public final class NetezzaTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "NetezzaTable"; /* * Properties specific to this dataset type. */ - @JsonProperty(value = "typeProperties") private NetezzaTableDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NetezzaTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of NetezzaTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the NetezzaTableDataset. 
+ */ + public static NetezzaTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NetezzaTableDataset deserializedNetezzaTableDataset = new NetezzaTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedNetezzaTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedNetezzaTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedNetezzaTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedNetezzaTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedNetezzaTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedNetezzaTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedNetezzaTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedNetezzaTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedNetezzaTableDataset.innerTypeProperties + = NetezzaTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedNetezzaTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedNetezzaTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java index 992a26197d27..e7815db8890a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Notebook parameter. */ @Fluent -public final class NotebookParameter { +public final class NotebookParameter implements JsonSerializable { /* * Notebook parameter value. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value") private Object value; /* * Notebook parameter type. */ - @JsonProperty(value = "type") private NotebookParameterType type; /** @@ -77,4 +79,43 @@ public NotebookParameter withType(NotebookParameterType type) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("value", this.value); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of NotebookParameter from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of NotebookParameter if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the NotebookParameter. + */ + public static NotebookParameter fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + NotebookParameter deserializedNotebookParameter = new NotebookParameter(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedNotebookParameter.value = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedNotebookParameter.type = NotebookParameterType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedNotebookParameter; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java index fb1da1ff3153..306799b3b9df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public NotebookParameterType() { * @param name a name to look for. * @return the corresponding NotebookParameterType. 
*/ - @JsonCreator public static NotebookParameterType fromString(String name) { return fromString(name, NotebookParameterType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java index e188f220b62d..6cc13e8c1feb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public NotebookReferenceType() { * @param name a name to look for. * @return the corresponding NotebookReferenceType. 
*/ - @JsonCreator public static NotebookReferenceType fromString(String name) { return fromString(name, NotebookReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAadServicePrincipalCredentialType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAadServicePrincipalCredentialType.java index f181959cd85d..1a5e89b26dcd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAadServicePrincipalCredentialType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAadServicePrincipalCredentialType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -40,7 +39,6 @@ public ODataAadServicePrincipalCredentialType() { * @param name a name to look for. * @return the corresponding ODataAadServicePrincipalCredentialType. 
*/ - @JsonCreator public static ODataAadServicePrincipalCredentialType fromString(String name) { return fromString(name, ODataAadServicePrincipalCredentialType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAuthenticationType.java index 446c761132a7..ddaf98a244e7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public ODataAuthenticationType() { * @param name a name to look for. * @return the corresponding ODataAuthenticationType. 
*/ - @JsonCreator public static ODataAuthenticationType fromString(String name) { return fromString(name, ODataAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java index 8b3ee1f0c161..9bbf26778e5b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ODataLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Open Data Protocol (OData) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataLinkedService.class, visible = true) -@JsonTypeName("OData") @Fluent public final class ODataLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OData"; /* * OData linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private ODataLinkedServiceTypeProperties innerTypeProperties = new ODataLinkedServiceTypeProperties(); /** @@ -465,4 +461,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ODataLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ODataLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ODataLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ODataLinkedService. 
+ */ + public static ODataLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ODataLinkedService deserializedODataLinkedService = new ODataLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedODataLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedODataLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedODataLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedODataLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedODataLinkedService.innerTypeProperties + = ODataLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedODataLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedODataLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedODataLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java index 0a979ff92586..286e750113ce 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataResourceDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ODataResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Open Data Protocol (OData) resource dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataResourceDataset.class, visible = true) -@JsonTypeName("ODataResource") @Fluent public final class ODataResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ODataResource"; /* * OData dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private ODataResourceDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ODataResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ODataResourceDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ODataResourceDataset. 
+ */ + public static ODataResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ODataResourceDataset deserializedODataResourceDataset = new ODataResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedODataResourceDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedODataResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedODataResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedODataResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedODataResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedODataResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedODataResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedODataResourceDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedODataResourceDataset.innerTypeProperties + = ODataResourceDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedODataResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedODataResourceDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java index abc237dbb830..7ca71c826d91 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ODataSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for OData source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ODataSource.class, visible = true) -@JsonTypeName("ODataSource") @Fluent public final class ODataSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ODataSource"; /* * OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* @@ -35,14 +32,12 @@ public final class ODataSource extends CopySource { * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -172,4 +167,72 @@ public ODataSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ODataSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ODataSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ODataSource. 
+ */ + public static ODataSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ODataSource deserializedODataSource = new ODataSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedODataSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedODataSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedODataSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedODataSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedODataSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedODataSource.query = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedODataSource.httpRequestTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedODataSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedODataSource.withAdditionalProperties(additionalProperties); + + return deserializedODataSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java index 66f18cab4795..805bc56d4990 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OdbcLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Open Database Connectivity (ODBC) linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcLinkedService.class, visible = true) -@JsonTypeName("Odbc") @Fluent public final class OdbcLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Odbc"; /* * ODBC linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private OdbcLinkedServiceTypeProperties innerTypeProperties = new OdbcLinkedServiceTypeProperties(); /** @@ -263,4 +259,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OdbcLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OdbcLinkedService. 
+ */ + public static OdbcLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcLinkedService deserializedOdbcLinkedService = new OdbcLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedOdbcLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOdbcLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOdbcLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOdbcLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedOdbcLinkedService.innerTypeProperties + = OdbcLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOdbcLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOdbcLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedOdbcLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java index ec509c353151..5d5cc06ce460 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity ODBC sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcSink.class, visible = true) -@JsonTypeName("OdbcSink") @Fluent public final class OdbcSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OdbcSink"; /* * A query to execute before starting the copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -131,4 +128,72 @@ public OdbcSink withDisableMetricsCollection(Object disableMetricsCollection) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the OdbcSink. 
+ */ + public static OdbcSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcSink deserializedOdbcSink = new OdbcSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedOdbcSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedOdbcSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedOdbcSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedOdbcSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOdbcSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOdbcSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOdbcSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedOdbcSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOdbcSink.withAdditionalProperties(additionalProperties); + + return deserializedOdbcSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java index a79b5792a3ca..6e1b24b4cc2f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for ODBC databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcSource.class, visible = true) -@JsonTypeName("OdbcSource") @Fluent public final class OdbcSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OdbcSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public OdbcSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the OdbcSource. 
+ */ + public static OdbcSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcSource deserializedOdbcSource = new OdbcSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedOdbcSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedOdbcSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOdbcSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOdbcSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedOdbcSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedOdbcSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOdbcSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedOdbcSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOdbcSource.withAdditionalProperties(additionalProperties); + + return deserializedOdbcSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java index 715ab896d28c..7d33932fafe0 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OdbcTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OdbcTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The ODBC table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OdbcTableDataset.class, visible = true) -@JsonTypeName("OdbcTable") @Fluent public final class OdbcTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OdbcTable"; /* * ODBC table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private OdbcTableDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OdbcTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OdbcTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OdbcTableDataset. 
+ */ + public static OdbcTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OdbcTableDataset deserializedOdbcTableDataset = new OdbcTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedOdbcTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOdbcTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedOdbcTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedOdbcTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOdbcTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOdbcTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedOdbcTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedOdbcTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedOdbcTableDataset.innerTypeProperties = OdbcTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOdbcTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedOdbcTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java index 3c7d0a079cc5..5b3747e8f0d2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Dataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.Office365DatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Office365 account. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365Dataset.class, visible = true) -@JsonTypeName("Office365Table") @Fluent public final class Office365Dataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Office365Table"; /* * Office365 dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private Office365DatasetTypeProperties innerTypeProperties = new Office365DatasetTypeProperties(); /** @@ -190,4 +186,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Office365Dataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Office365Dataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Office365Dataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Office365Dataset. 
+ */ + public static Office365Dataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Office365Dataset deserializedOffice365Dataset = new Office365Dataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedOffice365Dataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOffice365Dataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedOffice365Dataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedOffice365Dataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOffice365Dataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOffice365Dataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedOffice365Dataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedOffice365Dataset.innerTypeProperties = Office365DatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOffice365Dataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOffice365Dataset.withAdditionalProperties(additionalProperties); + + return deserializedOffice365Dataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java index c7b3735060da..5f9a1d0e751f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365LinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.Office365LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Office365 linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365LinkedService.class, visible = true) -@JsonTypeName("Office365") @Fluent public final class Office365LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Office365"; /* * Office365 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private Office365LinkedServiceTypeProperties innerTypeProperties = new Office365LinkedServiceTypeProperties(); /** @@ -236,4 +232,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Office365LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Office365LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Office365LinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Office365LinkedService. 
+ */ + public static Office365LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Office365LinkedService deserializedOffice365LinkedService = new Office365LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedOffice365LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOffice365LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOffice365LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOffice365LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedOffice365LinkedService.innerTypeProperties + = Office365LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOffice365LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOffice365LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedOffice365LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java index d254fbb71ae0..bad9030874a7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Office365Source.java @@ -5,61 +5,53 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for an Office 365 service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Office365Source.class, visible = true) -@JsonTypeName("Office365Source") @Fluent public final class Office365Source extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Office365Source"; /* * The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). */ - @JsonProperty(value = "allowedGroups") private Object allowedGroups; /* * The user scope uri. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userScopeFilterUri") private Object userScopeFilterUri; /* * The Column to apply the and . Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "dateFilterColumn") private Object dateFilterColumn; /* * Start time of the requested range for this dataset. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "startTime") private Object startTime; /* * End time of the requested range for this dataset. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "endTime") private Object endTime; /* * The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array * of objects). itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] */ - @JsonProperty(value = "outputColumns") private Object outputColumns; /** @@ -255,4 +247,81 @@ public Office365Source withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("allowedGroups", this.allowedGroups); + jsonWriter.writeUntypedField("userScopeFilterUri", this.userScopeFilterUri); + jsonWriter.writeUntypedField("dateFilterColumn", this.dateFilterColumn); + jsonWriter.writeUntypedField("startTime", this.startTime); + jsonWriter.writeUntypedField("endTime", this.endTime); + jsonWriter.writeUntypedField("outputColumns", this.outputColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Office365Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Office365Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the Office365Source. + */ + public static Office365Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Office365Source deserializedOffice365Source = new Office365Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedOffice365Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedOffice365Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOffice365Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOffice365Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOffice365Source.type = reader.getString(); + } else if ("allowedGroups".equals(fieldName)) { + deserializedOffice365Source.allowedGroups = reader.readUntyped(); + } else if ("userScopeFilterUri".equals(fieldName)) { + deserializedOffice365Source.userScopeFilterUri = reader.readUntyped(); + } else if ("dateFilterColumn".equals(fieldName)) { + deserializedOffice365Source.dateFilterColumn = reader.readUntyped(); + } else if ("startTime".equals(fieldName)) { + deserializedOffice365Source.startTime = reader.readUntyped(); + } else if ("endTime".equals(fieldName)) { + deserializedOffice365Source.endTime = reader.readUntyped(); + } else if ("outputColumns".equals(fieldName)) { + deserializedOffice365Source.outputColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedOffice365Source.withAdditionalProperties(additionalProperties); + + return deserializedOffice365Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationDisplay.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationDisplay.java index 193af8b77513..7d9b4b76d8b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationDisplay.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationDisplay.java @@ -5,35 +5,35 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Metadata associated with the operation. */ @Fluent -public final class OperationDisplay { +public final class OperationDisplay implements JsonSerializable { /* * The description of the operation. */ - @JsonProperty(value = "description") private String description; /* * The name of the provider. */ - @JsonProperty(value = "provider") private String provider; /* * The name of the resource type on which the operation is performed. */ - @JsonProperty(value = "resource") private String resource; /* * The type of operation: get, read, delete, etc. 
*/ - @JsonProperty(value = "operation") private String operation; /** @@ -129,4 +129,49 @@ public OperationDisplay withOperation(String operation) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeStringField("provider", this.provider); + jsonWriter.writeStringField("resource", this.resource); + jsonWriter.writeStringField("operation", this.operation); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationDisplay from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationDisplay if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationDisplay. + */ + public static OperationDisplay fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationDisplay deserializedOperationDisplay = new OperationDisplay(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedOperationDisplay.description = reader.getString(); + } else if ("provider".equals(fieldName)) { + deserializedOperationDisplay.provider = reader.getString(); + } else if ("resource".equals(fieldName)) { + deserializedOperationDisplay.resource = reader.getString(); + } else if ("operation".equals(fieldName)) { + deserializedOperationDisplay.operation = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationDisplay; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationListResponse.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationListResponse.java index d7db4ac196b0..e882843f964c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationListResponse.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OperationInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of operations that can be performed by the Data Factory service. */ @Fluent -public final class OperationListResponse { +public final class OperationListResponse implements JsonSerializable { /* * List of Data Factory operations supported by the Data Factory resource provider. */ - @JsonProperty(value = "value") private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -82,4 +84,44 @@ public void validate() { value().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of OperationListResponse if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationListResponse. + */ + public static OperationListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationListResponse deserializedOperationListResponse = new OperationListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> OperationInner.fromJson(reader1)); + deserializedOperationListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedOperationListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationLogSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationLogSpecification.java index aeca27bdc4a6..427f26dc3b95 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationLogSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationLogSpecification.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Details about an operation related to logs. 
*/ @Fluent -public final class OperationLogSpecification { +public final class OperationLogSpecification implements JsonSerializable { /* * The name of the log category. */ - @JsonProperty(value = "name") private String name; /* * Localized display name. */ - @JsonProperty(value = "displayName") private String displayName; /* * Blobs created in the customer storage account, per hour. */ - @JsonProperty(value = "blobDuration") private String blobDuration; /** @@ -103,4 +104,46 @@ public OperationLogSpecification withBlobDuration(String blobDuration) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("displayName", this.displayName); + jsonWriter.writeStringField("blobDuration", this.blobDuration); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationLogSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationLogSpecification if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationLogSpecification. 
+ */ + public static OperationLogSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationLogSpecification deserializedOperationLogSpecification = new OperationLogSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedOperationLogSpecification.name = reader.getString(); + } else if ("displayName".equals(fieldName)) { + deserializedOperationLogSpecification.displayName = reader.getString(); + } else if ("blobDuration".equals(fieldName)) { + deserializedOperationLogSpecification.blobDuration = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationLogSpecification; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricAvailability.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricAvailability.java index 621d2f9e16d0..c5fb83d5950d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricAvailability.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricAvailability.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Defines how often data for a metric becomes available. 
*/ @Fluent -public final class OperationMetricAvailability { +public final class OperationMetricAvailability implements JsonSerializable { /* * The granularity for the metric. */ - @JsonProperty(value = "timeGrain") private String timeGrain; /* * Blob created in the customer storage account, per hour. */ - @JsonProperty(value = "blobDuration") private String blobDuration; /** @@ -77,4 +79,43 @@ public OperationMetricAvailability withBlobDuration(String blobDuration) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("timeGrain", this.timeGrain); + jsonWriter.writeStringField("blobDuration", this.blobDuration); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationMetricAvailability from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationMetricAvailability if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationMetricAvailability. 
+ */ + public static OperationMetricAvailability fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationMetricAvailability deserializedOperationMetricAvailability = new OperationMetricAvailability(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("timeGrain".equals(fieldName)) { + deserializedOperationMetricAvailability.timeGrain = reader.getString(); + } else if ("blobDuration".equals(fieldName)) { + deserializedOperationMetricAvailability.blobDuration = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationMetricAvailability; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricDimension.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricDimension.java index 6bfd361ba16c..fb168315700d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricDimension.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricDimension.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Defines the metric dimension. */ @Fluent -public final class OperationMetricDimension { +public final class OperationMetricDimension implements JsonSerializable { /* * The name of the dimension for the metric. 
*/ - @JsonProperty(value = "name") private String name; /* * The display name of the metric dimension. */ - @JsonProperty(value = "displayName") private String displayName; /* * Whether the dimension should be exported to Azure Monitor. */ - @JsonProperty(value = "toBeExportedForShoebox") private Boolean toBeExportedForShoebox; /** @@ -103,4 +104,47 @@ public OperationMetricDimension withToBeExportedForShoebox(Boolean toBeExportedF */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("displayName", this.displayName); + jsonWriter.writeBooleanField("toBeExportedForShoebox", this.toBeExportedForShoebox); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationMetricDimension from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationMetricDimension if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationMetricDimension. 
+ */ + public static OperationMetricDimension fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationMetricDimension deserializedOperationMetricDimension = new OperationMetricDimension(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedOperationMetricDimension.name = reader.getString(); + } else if ("displayName".equals(fieldName)) { + deserializedOperationMetricDimension.displayName = reader.getString(); + } else if ("toBeExportedForShoebox".equals(fieldName)) { + deserializedOperationMetricDimension.toBeExportedForShoebox + = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedOperationMetricDimension; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricSpecification.java index 66fc0be30620..0a3425ba64bf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationMetricSpecification.java @@ -5,72 +5,66 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Details about an operation related to metrics. 
*/ @Fluent -public final class OperationMetricSpecification { +public final class OperationMetricSpecification implements JsonSerializable { /* * The name of the metric. */ - @JsonProperty(value = "name") private String name; /* * Localized display name of the metric. */ - @JsonProperty(value = "displayName") private String displayName; /* * The description of the metric. */ - @JsonProperty(value = "displayDescription") private String displayDescription; /* * The unit that the metric is measured in. */ - @JsonProperty(value = "unit") private String unit; /* * The type of metric aggregation. */ - @JsonProperty(value = "aggregationType") private String aggregationType; /* * Whether or not the service is using regional MDM accounts. */ - @JsonProperty(value = "enableRegionalMdmAccount") private String enableRegionalMdmAccount; /* * The name of the MDM account. */ - @JsonProperty(value = "sourceMdmAccount") private String sourceMdmAccount; /* * The name of the MDM namespace. */ - @JsonProperty(value = "sourceMdmNamespace") private String sourceMdmNamespace; /* * Defines how often data for metrics becomes available. */ - @JsonProperty(value = "availabilities") private List availabilities; /* * Defines the metric dimension. 
*/ - @JsonProperty(value = "dimensions") private List dimensions; /** @@ -292,4 +286,72 @@ public void validate() { dimensions().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("displayName", this.displayName); + jsonWriter.writeStringField("displayDescription", this.displayDescription); + jsonWriter.writeStringField("unit", this.unit); + jsonWriter.writeStringField("aggregationType", this.aggregationType); + jsonWriter.writeStringField("enableRegionalMdmAccount", this.enableRegionalMdmAccount); + jsonWriter.writeStringField("sourceMdmAccount", this.sourceMdmAccount); + jsonWriter.writeStringField("sourceMdmNamespace", this.sourceMdmNamespace); + jsonWriter.writeArrayField("availabilities", this.availabilities, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("dimensions", this.dimensions, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationMetricSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationMetricSpecification if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationMetricSpecification. 
+ */ + public static OperationMetricSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationMetricSpecification deserializedOperationMetricSpecification = new OperationMetricSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedOperationMetricSpecification.name = reader.getString(); + } else if ("displayName".equals(fieldName)) { + deserializedOperationMetricSpecification.displayName = reader.getString(); + } else if ("displayDescription".equals(fieldName)) { + deserializedOperationMetricSpecification.displayDescription = reader.getString(); + } else if ("unit".equals(fieldName)) { + deserializedOperationMetricSpecification.unit = reader.getString(); + } else if ("aggregationType".equals(fieldName)) { + deserializedOperationMetricSpecification.aggregationType = reader.getString(); + } else if ("enableRegionalMdmAccount".equals(fieldName)) { + deserializedOperationMetricSpecification.enableRegionalMdmAccount = reader.getString(); + } else if ("sourceMdmAccount".equals(fieldName)) { + deserializedOperationMetricSpecification.sourceMdmAccount = reader.getString(); + } else if ("sourceMdmNamespace".equals(fieldName)) { + deserializedOperationMetricSpecification.sourceMdmNamespace = reader.getString(); + } else if ("availabilities".equals(fieldName)) { + List availabilities + = reader.readArray(reader1 -> OperationMetricAvailability.fromJson(reader1)); + deserializedOperationMetricSpecification.availabilities = availabilities; + } else if ("dimensions".equals(fieldName)) { + List dimensions + = reader.readArray(reader1 -> OperationMetricDimension.fromJson(reader1)); + deserializedOperationMetricSpecification.dimensions = dimensions; + } else { + reader.skipChildren(); + } + } + + return deserializedOperationMetricSpecification; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationServiceSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationServiceSpecification.java index 5961fcc67e0c..0129cd839cbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationServiceSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OperationServiceSpecification.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Details about a service operation. */ @Fluent -public final class OperationServiceSpecification { +public final class OperationServiceSpecification implements JsonSerializable { /* * Details about operations related to logs. */ - @JsonProperty(value = "logSpecifications") private List logSpecifications; /* * Details about operations related to metrics. 
*/ - @JsonProperty(value = "metricSpecifications") private List metricSpecifications; /** @@ -85,4 +87,50 @@ public void validate() { metricSpecifications().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("logSpecifications", this.logSpecifications, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("metricSpecifications", this.metricSpecifications, + (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OperationServiceSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OperationServiceSpecification if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OperationServiceSpecification. 
+ */ + public static OperationServiceSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OperationServiceSpecification deserializedOperationServiceSpecification + = new OperationServiceSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("logSpecifications".equals(fieldName)) { + List logSpecifications + = reader.readArray(reader1 -> OperationLogSpecification.fromJson(reader1)); + deserializedOperationServiceSpecification.logSpecifications = logSpecifications; + } else if ("metricSpecifications".equals(fieldName)) { + List metricSpecifications + = reader.readArray(reader1 -> OperationMetricSpecification.fromJson(reader1)); + deserializedOperationServiceSpecification.metricSpecifications = metricSpecifications; + } else { + reader.skipChildren(); + } + } + + return deserializedOperationServiceSpecification; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java index 13d3cb1630af..3f8aefb41a7d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OracleCloudStorageLinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Oracle Cloud Storage. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleCloudStorageLinkedService.class, - visible = true) -@JsonTypeName("OracleCloudStorage") @Fluent public final class OracleCloudStorageLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleCloudStorage"; /* * Oracle Cloud Storage linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private OracleCloudStorageLinkedServiceTypeProperties innerTypeProperties = new OracleCloudStorageLinkedServiceTypeProperties(); @@ -220,4 +212,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OracleCloudStorageLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return 
jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleCloudStorageLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleCloudStorageLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleCloudStorageLinkedService. + */ + public static OracleCloudStorageLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleCloudStorageLinkedService deserializedOracleCloudStorageLinkedService + = new OracleCloudStorageLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedOracleCloudStorageLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOracleCloudStorageLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOracleCloudStorageLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOracleCloudStorageLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedOracleCloudStorageLinkedService.innerTypeProperties + = OracleCloudStorageLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOracleCloudStorageLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleCloudStorageLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedOracleCloudStorageLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java index ac59d1e5ef1f..7e87466299a9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageLocation.java @@ -5,39 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of Oracle Cloud Storage dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleCloudStorageLocation.class, - visible = true) -@JsonTypeName("OracleCloudStorageLocation") @Fluent public final class OracleCloudStorageLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleCloudStorageLocation"; /* * Specify the bucketName of Oracle Cloud Storage. 
Type: string (or Expression with resultType string) */ - @JsonProperty(value = "bucketName") private Object bucketName; /* * Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "version") private Object version; /** @@ -127,4 +119,63 @@ public OracleCloudStorageLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + jsonWriter.writeUntypedField("version", this.version); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleCloudStorageLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleCloudStorageLocation if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OracleCloudStorageLocation. 
+ */ + public static OracleCloudStorageLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleCloudStorageLocation deserializedOracleCloudStorageLocation = new OracleCloudStorageLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedOracleCloudStorageLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedOracleCloudStorageLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOracleCloudStorageLocation.type = reader.getString(); + } else if ("bucketName".equals(fieldName)) { + deserializedOracleCloudStorageLocation.bucketName = reader.readUntyped(); + } else if ("version".equals(fieldName)) { + deserializedOracleCloudStorageLocation.version = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleCloudStorageLocation.withAdditionalProperties(additionalProperties); + + return deserializedOracleCloudStorageLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java index 90d9c1d4a7e9..5352ce7418aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleCloudStorageReadSettings.java @@ -5,90 +5,74 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Oracle Cloud Storage read settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleCloudStorageReadSettings.class, - visible = true) -@JsonTypeName("OracleCloudStorageReadSettings") @Fluent public final class OracleCloudStorageReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleCloudStorageReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "prefix") private Object prefix; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /** @@ -354,4 +338,88 @@ public OracleCloudStorageReadSettings withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("prefix", this.prefix); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleCloudStorageReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleCloudStorageReadSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the OracleCloudStorageReadSettings. + */ + public static OracleCloudStorageReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleCloudStorageReadSettings deserializedOracleCloudStorageReadSettings + = new OracleCloudStorageReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("prefix".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.prefix = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.fileListPath = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("deleteFilesAfterCompletion".equals(fieldName)) { + 
deserializedOracleCloudStorageReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedOracleCloudStorageReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleCloudStorageReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedOracleCloudStorageReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java index d84effceb2a5..90a7c967ddc4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OracleLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Oracle database. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleLinkedService.class, visible = true) -@JsonTypeName("Oracle") @Fluent public final class OracleLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Oracle"; /* * Oracle database linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private OracleLinkedServiceTypeProperties innerTypeProperties = new OracleLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OracleLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the OracleLinkedService. + */ + public static OracleLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleLinkedService deserializedOracleLinkedService = new OracleLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedOracleLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOracleLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOracleLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOracleLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedOracleLinkedService.innerTypeProperties + = OracleLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOracleLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedOracleLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java index e18eba4d0201..2bbb5527425e 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OraclePartitionSettings.java @@ -5,38 +5,38 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for Oracle source partitioning. */ @Fluent -public final class OraclePartitionSettings { +public final class OraclePartitionSettings implements JsonSerializable { /* * Names of the physical partitions of Oracle table. */ - @JsonProperty(value = "partitionNames") private Object partitionNames; /* * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /** @@ -138,4 +138,49 @@ public OraclePartitionSettings withPartitionLowerBound(Object partitionLowerBoun */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionNames", this.partitionNames); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OraclePartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OraclePartitionSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the OraclePartitionSettings. 
+ */ + public static OraclePartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OraclePartitionSettings deserializedOraclePartitionSettings = new OraclePartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionNames".equals(fieldName)) { + deserializedOraclePartitionSettings.partitionNames = reader.readUntyped(); + } else if ("partitionColumnName".equals(fieldName)) { + deserializedOraclePartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedOraclePartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedOraclePartitionSettings.partitionLowerBound = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedOraclePartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java index 815fb8769314..9b0f307d1e1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OracleServiceCloudLinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Oracle Service Cloud linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleServiceCloudLinkedService.class, - visible = true) -@JsonTypeName("OracleServiceCloud") @Fluent public final class OracleServiceCloudLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleServiceCloud"; /* * Oracle Service Cloud linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private OracleServiceCloudLinkedServiceTypeProperties innerTypeProperties = new OracleServiceCloudLinkedServiceTypeProperties(); @@ -289,4 +281,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(OracleServiceCloudLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return 
jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleServiceCloudLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleServiceCloudLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleServiceCloudLinkedService. + */ + public static OracleServiceCloudLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleServiceCloudLinkedService deserializedOracleServiceCloudLinkedService + = new OracleServiceCloudLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedOracleServiceCloudLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOracleServiceCloudLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOracleServiceCloudLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOracleServiceCloudLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedOracleServiceCloudLinkedService.innerTypeProperties + = OracleServiceCloudLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedOracleServiceCloudLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleServiceCloudLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedOracleServiceCloudLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java index 9c0819f4b528..b372a3672f03 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Oracle Service Cloud dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleServiceCloudObjectDataset.class, - visible = true) -@JsonTypeName("OracleServiceCloudObject") @Fluent public final class OracleServiceCloudObjectDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleServiceCloudObject"; /* * Properties specific to this dataset type. */ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleServiceCloudObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleServiceCloudObjectDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleServiceCloudObjectDataset. 
+ */ + public static OracleServiceCloudObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleServiceCloudObjectDataset deserializedOracleServiceCloudObjectDataset + = new OracleServiceCloudObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOracleServiceCloudObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOracleServiceCloudObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedOracleServiceCloudObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedOracleServiceCloudObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedOracleServiceCloudObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java index fb09f41e441d..e55171a7bf90 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleServiceCloudSource.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Oracle Service Cloud source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = OracleServiceCloudSource.class, - visible = true) -@JsonTypeName("OracleServiceCloudSource") @Fluent public final class OracleServiceCloudSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleServiceCloudSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -135,4 +128,72 @@ public OracleServiceCloudSource withDisableMetricsCollection(Object disableMetri public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleServiceCloudSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleServiceCloudSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the OracleServiceCloudSource. 
+ */ + public static OracleServiceCloudSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleServiceCloudSource deserializedOracleServiceCloudSource = new OracleServiceCloudSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedOracleServiceCloudSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedOracleServiceCloudSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOracleServiceCloudSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOracleServiceCloudSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedOracleServiceCloudSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedOracleServiceCloudSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOracleServiceCloudSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedOracleServiceCloudSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleServiceCloudSource.withAdditionalProperties(additionalProperties); + + return deserializedOracleServiceCloudSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java index 91f431604127..e364ee78a580 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSink.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Oracle sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleSink.class, visible = true) -@JsonTypeName("OracleSink") @Fluent public final class OracleSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleSink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /** @@ -129,4 +126,72 @@ public OracleSink withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the OracleSink. 
+ */ + public static OracleSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleSink deserializedOracleSink = new OracleSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedOracleSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedOracleSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedOracleSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedOracleSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOracleSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOracleSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOracleSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedOracleSink.preCopyScript = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleSink.withAdditionalProperties(additionalProperties); + + return deserializedOracleSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java index 36697c5be298..ad0f1d460a84 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleSource.java @@ -5,56 +5,49 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Oracle source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleSource.class, visible = true) -@JsonTypeName("OracleSource") @Fluent public final class OracleSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleSource"; /* * Oracle reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "oracleReaderQuery") private Object oracleReaderQuery; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Oracle source partitioning. */ - @JsonProperty(value = "partitionSettings") private OraclePartitionSettings partitionSettings; /* * Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -227,4 +220,78 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("oracleReaderQuery", this.oracleReaderQuery); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the OracleSource. 
+ */ + public static OracleSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleSource deserializedOracleSource = new OracleSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedOracleSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedOracleSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOracleSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOracleSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOracleSource.type = reader.getString(); + } else if ("oracleReaderQuery".equals(fieldName)) { + deserializedOracleSource.oracleReaderQuery = reader.readUntyped(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedOracleSource.queryTimeout = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedOracleSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedOracleSource.partitionSettings = OraclePartitionSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedOracleSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleSource.withAdditionalProperties(additionalProperties); + + return deserializedOracleSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java index b60389b0917a..dc367659fe45 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OracleTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OracleTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The on-premises Oracle database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OracleTableDataset.class, visible = true) -@JsonTypeName("OracleTable") @Fluent public final class OracleTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OracleTable"; /* * On-premises Oracle dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private OracleTableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OracleTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OracleTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OracleTableDataset. 
+ */ + public static OracleTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OracleTableDataset deserializedOracleTableDataset = new OracleTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedOracleTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOracleTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedOracleTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedOracleTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOracleTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOracleTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedOracleTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedOracleTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedOracleTableDataset.innerTypeProperties + = OracleTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOracleTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedOracleTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java index 054ab19eda37..42f422adc1af 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.OrcDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * ORC dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcDataset.class, visible = true) -@JsonTypeName("Orc") @Fluent public final class OrcDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Orc"; /* * ORC dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private OrcDatasetTypeProperties innerTypeProperties; /** @@ -181,4 +177,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcDataset if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the OrcDataset. 
+ */ + public static OrcDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcDataset deserializedOrcDataset = new OrcDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedOrcDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedOrcDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedOrcDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedOrcDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedOrcDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedOrcDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedOrcDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedOrcDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedOrcDataset.innerTypeProperties = OrcDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOrcDataset.withAdditionalProperties(additionalProperties); + + return deserializedOrcDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java index d8f0a0615f01..e1e1b7d07a23 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcFormat.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The data stored in Optimized Row Columnar (ORC) format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcFormat.class, visible = true) -@JsonTypeName("OrcFormat") @Fluent public final class OrcFormat extends DatasetStorageFormat { /* * Type of dataset storage format. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OrcFormat"; /** @@ -67,4 +65,57 @@ public OrcFormat withDeserializer(Object deserializer) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("serializer", serializer()); + jsonWriter.writeUntypedField("deserializer", deserializer()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcFormat if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the OrcFormat. 
+ */ + public static OrcFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcFormat deserializedOrcFormat = new OrcFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serializer".equals(fieldName)) { + deserializedOrcFormat.withSerializer(reader.readUntyped()); + } else if ("deserializer".equals(fieldName)) { + deserializedOrcFormat.withDeserializer(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOrcFormat.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOrcFormat.withAdditionalProperties(additionalProperties); + + return deserializedOrcFormat; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java index f9ab94417b6a..89ec929ad375 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import 
java.util.Map; /** * A copy activity ORC sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcSink.class, visible = true) -@JsonTypeName("OrcSink") @Fluent public final class OrcSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OrcSink"; /* * ORC store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /* * ORC format settings. */ - @JsonProperty(value = "formatSettings") private OrcWriteSettings formatSettings; /** @@ -161,4 +157,75 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcSink if the JsonReader was pointing to an instance of it, or null if it was pointing to + * JSON null. 
+ * @throws IOException If an error occurs while reading the OrcSink. + */ + public static OrcSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcSink deserializedOrcSink = new OrcSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedOrcSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedOrcSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedOrcSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedOrcSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOrcSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOrcSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOrcSink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedOrcSink.storeSettings = StoreWriteSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedOrcSink.formatSettings = OrcWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOrcSink.withAdditionalProperties(additionalProperties); + + return deserializedOrcSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java index 45ccc1841d96..39d1828622ee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity ORC source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcSource.class, visible = true) -@JsonTypeName("OrcSource") @Fluent public final class OrcSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OrcSource"; /* * ORC store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -143,4 +139,69 @@ public void validate() { storeSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the OrcSource. 
+ */ + public static OrcSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcSource deserializedOrcSource = new OrcSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedOrcSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedOrcSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedOrcSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedOrcSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedOrcSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedOrcSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedOrcSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOrcSource.withAdditionalProperties(additionalProperties); + + return deserializedOrcSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java index 3af7b7a3db2b..c00e69fc5638 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/OrcWriteSettings.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Orc write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = OrcWriteSettings.class, visible = true) -@JsonTypeName("OrcWriteSettings") @Fluent public final class OrcWriteSettings extends FormatWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "OrcWriteSettings"; /* * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or * Expression with resultType integer). */ - @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* * Specifies the file name pattern _. when copy from non-file based store * without partitionOptions. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; /** @@ -109,4 +105,57 @@ public OrcWriteSettings withFileNamePrefix(Object fileNamePrefix) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("maxRowsPerFile", this.maxRowsPerFile); + jsonWriter.writeUntypedField("fileNamePrefix", this.fileNamePrefix); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of OrcWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of OrcWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the OrcWriteSettings. 
+ */ + public static OrcWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + OrcWriteSettings deserializedOrcWriteSettings = new OrcWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedOrcWriteSettings.type = reader.getString(); + } else if ("maxRowsPerFile".equals(fieldName)) { + deserializedOrcWriteSettings.maxRowsPerFile = reader.readUntyped(); + } else if ("fileNamePrefix".equals(fieldName)) { + deserializedOrcWriteSettings.fileNamePrefix = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedOrcWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedOrcWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java index 5d47365dd271..c37a61087120 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PackageStore.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Package store for the SSIS integration runtime. 
*/ @Fluent -public final class PackageStore { +public final class PackageStore implements JsonSerializable { /* * The name of the package store */ - @JsonProperty(value = "name", required = true) private String name; /* * The package store linked service reference. */ - @JsonProperty(value = "packageStoreLinkedService", required = true) private EntityReference packageStoreLinkedService; /** @@ -91,4 +93,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PackageStore.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeJsonField("packageStoreLinkedService", this.packageStoreLinkedService); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PackageStore from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PackageStore if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PackageStore. 
+ */ + public static PackageStore fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PackageStore deserializedPackageStore = new PackageStore(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedPackageStore.name = reader.getString(); + } else if ("packageStoreLinkedService".equals(fieldName)) { + deserializedPackageStore.packageStoreLinkedService = EntityReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedPackageStore; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java index 06f37aa3990b..46b398974a36 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterSpecification.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Definition of a single parameter for an entity. */ @Fluent -public final class ParameterSpecification { +public final class ParameterSpecification implements JsonSerializable { /* * Parameter type. */ - @JsonProperty(value = "type", required = true) private ParameterType type; /* * Default value of parameter. 
*/ - @JsonProperty(value = "defaultValue") private Object defaultValue; /** @@ -84,4 +86,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ParameterSpecification.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("defaultValue", this.defaultValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParameterSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParameterSpecification if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ParameterSpecification. 
+ */ + public static ParameterSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParameterSpecification deserializedParameterSpecification = new ParameterSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedParameterSpecification.type = ParameterType.fromString(reader.getString()); + } else if ("defaultValue".equals(fieldName)) { + deserializedParameterSpecification.defaultValue = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedParameterSpecification; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterType.java index 4c45c5d7328a..89da1666f99b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParameterType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -62,7 +61,6 @@ public ParameterType() { * @param name a name to look for. * @return the corresponding ParameterType. 
*/ - @JsonCreator public static ParameterType fromString(String name) { return fromString(name, ParameterType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java index 470864efbd0d..51ccc341215a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ParquetDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Parquet dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetDataset.class, visible = true) -@JsonTypeName("Parquet") @Fluent public final class ParquetDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Parquet"; /* * Parquet dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private ParquetDatasetTypeProperties innerTypeProperties; /** @@ -181,4 +177,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ParquetDataset. 
+ */ + public static ParquetDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetDataset deserializedParquetDataset = new ParquetDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedParquetDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedParquetDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedParquetDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedParquetDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedParquetDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedParquetDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedParquetDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedParquetDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedParquetDataset.innerTypeProperties = ParquetDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetDataset.withAdditionalProperties(additionalProperties); + + return deserializedParquetDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java index 261cb07e3e26..42753a035cf8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetFormat.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The data stored in Parquet format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetFormat.class, visible = true) -@JsonTypeName("ParquetFormat") @Fluent public final class ParquetFormat extends DatasetStorageFormat { /* * Type of dataset storage format. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ParquetFormat"; /** @@ -67,4 +65,57 @@ public ParquetFormat withDeserializer(Object deserializer) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("serializer", serializer()); + jsonWriter.writeUntypedField("deserializer", deserializer()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetFormat if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ParquetFormat. 
+ */ + public static ParquetFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetFormat deserializedParquetFormat = new ParquetFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serializer".equals(fieldName)) { + deserializedParquetFormat.withSerializer(reader.readUntyped()); + } else if ("deserializer".equals(fieldName)) { + deserializedParquetFormat.withDeserializer(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedParquetFormat.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetFormat.withAdditionalProperties(additionalProperties); + + return deserializedParquetFormat; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java index f755783d8aed..bcc5f5d151d2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Parquet read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetReadSettings.class, visible = true) -@JsonTypeName("ParquetReadSettings") @Fluent public final class ParquetReadSettings extends FormatReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ParquetReadSettings"; /* * Compression settings. */ - @JsonProperty(value = "compressionProperties") private CompressionReadSettings compressionProperties; /** @@ -78,4 +75,54 @@ public void validate() { compressionProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("compressionProperties", this.compressionProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ParquetReadSettings. 
+ */ + public static ParquetReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetReadSettings deserializedParquetReadSettings = new ParquetReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedParquetReadSettings.type = reader.getString(); + } else if ("compressionProperties".equals(fieldName)) { + deserializedParquetReadSettings.compressionProperties = CompressionReadSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedParquetReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java index a76451adb762..a18ae8317f1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import 
java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Parquet sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetSink.class, visible = true) -@JsonTypeName("ParquetSink") @Fluent public final class ParquetSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ParquetSink"; /* * Parquet store settings. */ - @JsonProperty(value = "storeSettings") private StoreWriteSettings storeSettings; /* * Parquet format settings. */ - @JsonProperty(value = "formatSettings") private ParquetWriteSettings formatSettings; /** @@ -161,4 +157,75 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ParquetSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ParquetSink. + */ + public static ParquetSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetSink deserializedParquetSink = new ParquetSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedParquetSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedParquetSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedParquetSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedParquetSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedParquetSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedParquetSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedParquetSink.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedParquetSink.storeSettings = StoreWriteSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedParquetSink.formatSettings = ParquetWriteSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetSink.withAdditionalProperties(additionalProperties); + + return deserializedParquetSink; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java index ef725334b579..4c40eadb8804 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Parquet source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetSource.class, visible = true) -@JsonTypeName("ParquetSource") @Fluent public final class ParquetSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ParquetSource"; /* * Parquet store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Parquet format settings. */ - @JsonProperty(value = "formatSettings") private ParquetReadSettings formatSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -172,4 +167,72 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ParquetSource. 
+ */ + public static ParquetSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetSource deserializedParquetSource = new ParquetSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedParquetSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedParquetSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedParquetSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedParquetSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedParquetSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedParquetSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedParquetSource.formatSettings = ParquetReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedParquetSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetSource.withAdditionalProperties(additionalProperties); + + return deserializedParquetSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java index 39c9af211119..c838ea6b43ce 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ParquetWriteSettings.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Parquet write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ParquetWriteSettings.class, visible = true) -@JsonTypeName("ParquetWriteSettings") @Fluent public final class ParquetWriteSettings extends FormatWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ParquetWriteSettings"; /* * Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or * Expression with resultType integer). */ - @JsonProperty(value = "maxRowsPerFile") private Object maxRowsPerFile; /* * Specifies the file name pattern _. when copy from non-file based store * without partitionOptions. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "fileNamePrefix") private Object fileNamePrefix; /** @@ -109,4 +105,57 @@ public ParquetWriteSettings withFileNamePrefix(Object fileNamePrefix) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("maxRowsPerFile", this.maxRowsPerFile); + jsonWriter.writeUntypedField("fileNamePrefix", this.fileNamePrefix); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ParquetWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ParquetWriteSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ParquetWriteSettings. 
+ */ + public static ParquetWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ParquetWriteSettings deserializedParquetWriteSettings = new ParquetWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedParquetWriteSettings.type = reader.getString(); + } else if ("maxRowsPerFile".equals(fieldName)) { + deserializedParquetWriteSettings.maxRowsPerFile = reader.readUntyped(); + } else if ("fileNamePrefix".equals(fieldName)) { + deserializedParquetWriteSettings.fileNamePrefix = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedParquetWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedParquetWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java index 7b1bc7f4c8c8..681e1d9d2300 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PaypalLinkedServiceTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Paypal Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalLinkedService.class, visible = true) -@JsonTypeName("Paypal") @Fluent public final class PaypalLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Paypal"; /* * Paypal Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private PaypalLinkedServiceTypeProperties innerTypeProperties = new PaypalLinkedServiceTypeProperties(); /** @@ -282,4 +278,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PaypalLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PaypalLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of PaypalLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PaypalLinkedService. + */ + public static PaypalLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PaypalLinkedService deserializedPaypalLinkedService = new PaypalLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedPaypalLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPaypalLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPaypalLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPaypalLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedPaypalLinkedService.innerTypeProperties + = PaypalLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedPaypalLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPaypalLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedPaypalLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java index e1538917f233..ad42a0ff31f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Paypal Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalObjectDataset.class, visible = true) -@JsonTypeName("PaypalObject") @Fluent public final class PaypalObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PaypalObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PaypalObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PaypalObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PaypalObjectDataset. 
+ */ + public static PaypalObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PaypalObjectDataset deserializedPaypalObjectDataset = new PaypalObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedPaypalObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPaypalObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedPaypalObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedPaypalObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPaypalObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPaypalObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedPaypalObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedPaypalObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedPaypalObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPaypalObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedPaypalObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java index bde4d655336a..c4777202dc9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PaypalSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Paypal Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PaypalSource.class, visible = true) -@JsonTypeName("PaypalSource") @Fluent public final class PaypalSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PaypalSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public PaypalSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PaypalSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PaypalSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PaypalSource. 
+ */ + public static PaypalSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PaypalSource deserializedPaypalSource = new PaypalSource(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedPaypalSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedPaypalSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedPaypalSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedPaypalSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedPaypalSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedPaypalSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedPaypalSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedPaypalSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPaypalSource.withAdditionalProperties(additionalProperties); + + return deserializedPaypalSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixAuthenticationType.java index 58a57a549ed3..fa351c4f7a6b 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -43,7 +42,6 @@ public PhoenixAuthenticationType() { * @param name a name to look for. * @return the corresponding PhoenixAuthenticationType. */ - @JsonCreator public static PhoenixAuthenticationType fromString(String name) { return fromString(name, PhoenixAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java index d897885d6437..6aa0d67c1e8f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PhoenixLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; 
import java.util.Map; /** * Phoenix server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixLinkedService.class, visible = true) -@JsonTypeName("Phoenix") @Fluent public final class PhoenixLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Phoenix"; /* * Phoenix server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private PhoenixLinkedServiceTypeProperties innerTypeProperties = new PhoenixLinkedServiceTypeProperties(); /** @@ -407,4 +403,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PhoenixLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PhoenixLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PhoenixLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PhoenixLinkedService. + */ + public static PhoenixLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PhoenixLinkedService deserializedPhoenixLinkedService = new PhoenixLinkedService(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedPhoenixLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPhoenixLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPhoenixLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPhoenixLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedPhoenixLinkedService.innerTypeProperties + = PhoenixLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedPhoenixLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPhoenixLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedPhoenixLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java index 9fab72ada075..6320b48d3e3a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PhoenixDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Phoenix server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixObjectDataset.class, visible = true) -@JsonTypeName("PhoenixObject") @Fluent public final class PhoenixObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PhoenixObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private PhoenixDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PhoenixObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PhoenixObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PhoenixObjectDataset. 
+ */ + public static PhoenixObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PhoenixObjectDataset deserializedPhoenixObjectDataset = new PhoenixObjectDataset(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedPhoenixObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPhoenixObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedPhoenixObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedPhoenixObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPhoenixObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPhoenixObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedPhoenixObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedPhoenixObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedPhoenixObjectDataset.innerTypeProperties + = PhoenixDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPhoenixObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedPhoenixObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java index f52bf3b58e2a..6ac59c8cb3f3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PhoenixSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Phoenix server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PhoenixSource.class, visible = true) -@JsonTypeName("PhoenixSource") @Fluent public final class PhoenixSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PhoenixSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public PhoenixSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PhoenixSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PhoenixSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PhoenixSource. 
+ */ + public static PhoenixSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PhoenixSource deserializedPhoenixSource = new PhoenixSource(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedPhoenixSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedPhoenixSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedPhoenixSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedPhoenixSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedPhoenixSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedPhoenixSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedPhoenixSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedPhoenixSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPhoenixSource.withAdditionalProperties(additionalProperties); + + return deserializedPhoenixSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineElapsedTimeMetricPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineElapsedTimeMetricPolicy.java index 6ad115818ebb..bb92a54e21c5 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineElapsedTimeMetricPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineElapsedTimeMetricPolicy.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Pipeline ElapsedTime Metric Policy. */ @Fluent -public final class PipelineElapsedTimeMetricPolicy { +public final class PipelineElapsedTimeMetricPolicy implements JsonSerializable { /* * TimeSpan value, after which an Azure Monitoring Metric is fired. */ - @JsonProperty(value = "duration") private Object duration; /** @@ -51,4 +54,41 @@ public PipelineElapsedTimeMetricPolicy withDuration(Object duration) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("duration", this.duration); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineElapsedTimeMetricPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineElapsedTimeMetricPolicy if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelineElapsedTimeMetricPolicy. 
+ */ + public static PipelineElapsedTimeMetricPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineElapsedTimeMetricPolicy deserializedPipelineElapsedTimeMetricPolicy + = new PipelineElapsedTimeMetricPolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("duration".equals(fieldName)) { + deserializedPipelineElapsedTimeMetricPolicy.duration = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineElapsedTimeMetricPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java index c7c7c64547bf..1c7444b6c837 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineExternalComputeScaleProperties.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,29 +17,26 @@ * PipelineExternalComputeScale properties for managed integration runtime. 
*/ @Fluent -public final class PipelineExternalComputeScaleProperties { +public final class PipelineExternalComputeScaleProperties + implements JsonSerializable { /* * Time to live (in minutes) setting of integration runtime which will execute pipeline and external activity. */ - @JsonProperty(value = "timeToLive") private Integer timeToLive; /* * Number of the pipeline nodes, which should be greater than 0 and less than 11. */ - @JsonProperty(value = "numberOfPipelineNodes") private Integer numberOfPipelineNodes; /* * Number of the the external nodes, which should be greater than 0 and less than 11. */ - @JsonProperty(value = "numberOfExternalNodes") private Integer numberOfExternalNodes; /* * PipelineExternalComputeScale properties for managed integration runtime. */ - @JsonIgnore private Map additionalProperties; /** @@ -118,7 +116,6 @@ public PipelineExternalComputeScaleProperties withNumberOfExternalNodes(Integer * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -134,14 +131,6 @@ public PipelineExternalComputeScaleProperties withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -149,4 +138,61 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("timeToLive", this.timeToLive); + jsonWriter.writeNumberField("numberOfPipelineNodes", this.numberOfPipelineNodes); + jsonWriter.writeNumberField("numberOfExternalNodes", this.numberOfExternalNodes); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineExternalComputeScaleProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineExternalComputeScaleProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelineExternalComputeScaleProperties. 
+ */ + public static PipelineExternalComputeScaleProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineExternalComputeScaleProperties deserializedPipelineExternalComputeScaleProperties + = new PipelineExternalComputeScaleProperties(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("timeToLive".equals(fieldName)) { + deserializedPipelineExternalComputeScaleProperties.timeToLive + = reader.getNullable(JsonReader::getInt); + } else if ("numberOfPipelineNodes".equals(fieldName)) { + deserializedPipelineExternalComputeScaleProperties.numberOfPipelineNodes + = reader.getNullable(JsonReader::getInt); + } else if ("numberOfExternalNodes".equals(fieldName)) { + deserializedPipelineExternalComputeScaleProperties.numberOfExternalNodes + = reader.getNullable(JsonReader::getInt); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPipelineExternalComputeScaleProperties.additionalProperties = additionalProperties; + + return deserializedPipelineExternalComputeScaleProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineFolder.java index 77b789ea9223..286bee0e2e04 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineFolder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineFolder.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; 
-import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. */ @Fluent -public final class PipelineFolder { +public final class PipelineFolder implements JsonSerializable { /* * The name of the folder that this Pipeline is in. */ - @JsonProperty(value = "name") private String name; /** @@ -51,4 +54,40 @@ public PipelineFolder withName(String name) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineFolder from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineFolder if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelineFolder. 
+ */ + public static PipelineFolder fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineFolder deserializedPipelineFolder = new PipelineFolder(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedPipelineFolder.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineFolder; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java index b586c50992a9..bc01952b460f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PipelineResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of pipeline resources. */ @Fluent -public final class PipelineListResponse { +public final class PipelineListResponse implements JsonSerializable { /* * List of pipelines. */ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. 
*/ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PipelineListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineListResponse if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PipelineListResponse. 
+ */ + public static PipelineListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineListResponse deserializedPipelineListResponse = new PipelineListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> PipelineResourceInner.fromJson(reader1)); + deserializedPipelineListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedPipelineListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelinePolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelinePolicy.java index 29cd02b4100e..6c74e3832bd4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelinePolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelinePolicy.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Pipeline Policy. */ @Fluent -public final class PipelinePolicy { +public final class PipelinePolicy implements JsonSerializable { /* * Pipeline ElapsedTime Metric Policy. 
*/ - @JsonProperty(value = "elapsedTimeMetric") private PipelineElapsedTimeMetricPolicy elapsedTimeMetric; /** @@ -54,4 +57,40 @@ public void validate() { elapsedTimeMetric().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("elapsedTimeMetric", this.elapsedTimeMetric); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelinePolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelinePolicy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelinePolicy. + */ + public static PipelinePolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelinePolicy deserializedPipelinePolicy = new PipelinePolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("elapsedTimeMetric".equals(fieldName)) { + deserializedPipelinePolicy.elapsedTimeMetric = PipelineElapsedTimeMetricPolicy.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelinePolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java index aa5fce3f4e80..cd30f02952b6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineReference.java @@ -6,29 +6,30 @@ import 
com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Pipeline reference type. */ @Fluent -public final class PipelineReference { +public final class PipelineReference implements JsonSerializable { /* * Pipeline reference type. */ - @JsonProperty(value = "type", required = true) private String type = "PipelineReference"; /* * Reference pipeline name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /* * Reference name. */ - @JsonProperty(value = "name") private String name; /** @@ -111,4 +112,45 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PipelineReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("referenceName", this.referenceName); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PipelineReference. 
+ */ + public static PipelineReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineReference deserializedPipelineReference = new PipelineReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceName".equals(fieldName)) { + deserializedPipelineReference.referenceName = reader.getString(); + } else if ("name".equals(fieldName)) { + deserializedPipelineReference.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineRunInvokedBy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineRunInvokedBy.java index 9f5ea677df2f..71f1f810a007 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineRunInvokedBy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PipelineRunInvokedBy.java @@ -5,41 +5,40 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Provides entity name and id that started the pipeline run. */ @Immutable -public final class PipelineRunInvokedBy { +public final class PipelineRunInvokedBy implements JsonSerializable { /* * Name of the entity that started the pipeline run. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The ID of the entity that started the run. 
*/ - @JsonProperty(value = "id", access = JsonProperty.Access.WRITE_ONLY) private String id; /* * The type of the entity that started the run. */ - @JsonProperty(value = "invokedByType", access = JsonProperty.Access.WRITE_ONLY) private String invokedByType; /* * The name of the pipeline that triggered the run, if any. */ - @JsonProperty(value = "pipelineName", access = JsonProperty.Access.WRITE_ONLY) private String pipelineName; /* * The run id of the pipeline that triggered the run, if any. */ - @JsonProperty(value = "pipelineRunId", access = JsonProperty.Access.WRITE_ONLY) private String pipelineRunId; /** @@ -100,4 +99,47 @@ public String pipelineRunId() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PipelineRunInvokedBy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PipelineRunInvokedBy if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the PipelineRunInvokedBy. 
+ */ + public static PipelineRunInvokedBy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PipelineRunInvokedBy deserializedPipelineRunInvokedBy = new PipelineRunInvokedBy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedPipelineRunInvokedBy.name = reader.getString(); + } else if ("id".equals(fieldName)) { + deserializedPipelineRunInvokedBy.id = reader.getString(); + } else if ("invokedByType".equals(fieldName)) { + deserializedPipelineRunInvokedBy.invokedByType = reader.getString(); + } else if ("pipelineName".equals(fieldName)) { + deserializedPipelineRunInvokedBy.pipelineName = reader.getString(); + } else if ("pipelineRunId".equals(fieldName)) { + deserializedPipelineRunInvokedBy.pipelineRunId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPipelineRunInvokedBy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java index 7cc11cbba6cd..9f0cab06fd7a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettings.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,38 +17,33 @@ * PolyBase settings. */ @Fluent -public final class PolybaseSettings { +public final class PolybaseSettings implements JsonSerializable { /* * Reject type. */ - @JsonProperty(value = "rejectType") private PolybaseSettingsRejectType rejectType; /* * Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or * Expression with resultType number), minimum: 0. */ - @JsonProperty(value = "rejectValue") private Object rejectValue; /* * Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected * rows. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "rejectSampleValue") private Object rejectSampleValue; /* * Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. * Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "useTypeDefault") private Object useTypeDefault; /* * PolyBase settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -147,7 +143,6 @@ public PolybaseSettings withUseTypeDefault(Object useTypeDefault) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -163,14 +158,6 @@ public PolybaseSettings withAdditionalProperties(Map additionalP return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -178,4 +165,60 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("rejectType", this.rejectType == null ? null : this.rejectType.toString()); + jsonWriter.writeUntypedField("rejectValue", this.rejectValue); + jsonWriter.writeUntypedField("rejectSampleValue", this.rejectSampleValue); + jsonWriter.writeUntypedField("useTypeDefault", this.useTypeDefault); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PolybaseSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PolybaseSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PolybaseSettings. 
+ */ + public static PolybaseSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PolybaseSettings deserializedPolybaseSettings = new PolybaseSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("rejectType".equals(fieldName)) { + deserializedPolybaseSettings.rejectType = PolybaseSettingsRejectType.fromString(reader.getString()); + } else if ("rejectValue".equals(fieldName)) { + deserializedPolybaseSettings.rejectValue = reader.readUntyped(); + } else if ("rejectSampleValue".equals(fieldName)) { + deserializedPolybaseSettings.rejectSampleValue = reader.readUntyped(); + } else if ("useTypeDefault".equals(fieldName)) { + deserializedPolybaseSettings.useTypeDefault = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPolybaseSettings.additionalProperties = additionalProperties; + + return deserializedPolybaseSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettingsRejectType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettingsRejectType.java index 8affb877dee6..a8d187267557 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettingsRejectType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PolybaseSettingsRejectType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import 
java.util.Collection; /** @@ -37,7 +36,6 @@ public PolybaseSettingsRejectType() { * @param name a name to look for. * @return the corresponding PolybaseSettingsRejectType. */ - @JsonCreator public static PolybaseSettingsRejectType fromString(String name) { return fromString(name, PolybaseSettingsRejectType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java index 2ee1bc25dd19..d6c1fa5605b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for PostgreSQL data source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = PostgreSqlLinkedService.class, - visible = true) -@JsonTypeName("PostgreSql") @Fluent public final class PostgreSqlLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSql"; /* * PostgreSQL linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private PostgreSqlLinkedServiceTypeProperties innerTypeProperties = new PostgreSqlLinkedServiceTypeProperties(); /** @@ -190,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PostgreSqlLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlLinkedService. 
+ */ + public static PostgreSqlLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlLinkedService deserializedPostgreSqlLinkedService = new PostgreSqlLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedPostgreSqlLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPostgreSqlLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPostgreSqlLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPostgreSqlLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedPostgreSqlLinkedService.innerTypeProperties + = PostgreSqlLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedPostgreSqlLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java index 72417050669a..2f97579788f9 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for PostgreSQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlSource.class, visible = true) -@JsonTypeName("PostgreSqlSource") @Fluent public final class PostgreSqlSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSqlSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public PostgreSqlSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PostgreSqlSource. 
+ */ + public static PostgreSqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlSource deserializedPostgreSqlSource = new PostgreSqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedPostgreSqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedPostgreSqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedPostgreSqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedPostgreSqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedPostgreSqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedPostgreSqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedPostgreSqlSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlSource.withAdditionalProperties(additionalProperties); + + return deserializedPostgreSqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java index 5e2e372c87df..e74fae207b98 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The PostgreSQL table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlTableDataset.class, visible = true) -@JsonTypeName("PostgreSqlTable") @Fluent public final class PostgreSqlTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSqlTable"; /* * PostgreSQL table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private PostgreSqlTableDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlTableDataset. 
+ */ + public static PostgreSqlTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlTableDataset deserializedPostgreSqlTableDataset = new PostgreSqlTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedPostgreSqlTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPostgreSqlTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedPostgreSqlTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedPostgreSqlTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPostgreSqlTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPostgreSqlTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedPostgreSqlTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedPostgreSqlTableDataset.innerTypeProperties + = PostgreSqlTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedPostgreSqlTableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java index 027ace8e8a48..c5d448bce03e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for PostgreSQLV2 data source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = PostgreSqlV2LinkedService.class, - visible = true) -@JsonTypeName("PostgreSqlV2") @Fluent public final class PostgreSqlV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSqlV2"; /* * PostgreSQLV2 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private PostgreSqlV2LinkedServiceTypeProperties innerTypeProperties = new PostgreSqlV2LinkedServiceTypeProperties(); /** @@ -572,4 +564,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PostgreSqlV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlV2LinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlV2LinkedService. 
+ */ + public static PostgreSqlV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlV2LinkedService deserializedPostgreSqlV2LinkedService = new PostgreSqlV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedPostgreSqlV2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPostgreSqlV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPostgreSqlV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPostgreSqlV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedPostgreSqlV2LinkedService.innerTypeProperties + = PostgreSqlV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedPostgreSqlV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java index 214d38f5ce20..eb219fec3f41 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2Source.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for PostgreSQL databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PostgreSqlV2Source.class, visible = true) -@JsonTypeName("PostgreSqlV2Source") @Fluent public final class PostgreSqlV2Source extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSqlV2Source"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public PostgreSqlV2Source withDisableMetricsCollection(Object disableMetricsColl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlV2Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PostgreSqlV2Source. 
+ */ + public static PostgreSqlV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlV2Source deserializedPostgreSqlV2Source = new PostgreSqlV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedPostgreSqlV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedPostgreSqlV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedPostgreSqlV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedPostgreSqlV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedPostgreSqlV2Source.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedPostgreSqlV2Source.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlV2Source.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedPostgreSqlV2Source.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlV2Source.withAdditionalProperties(additionalProperties); + + return deserializedPostgreSqlV2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java index 
3beee6ee1e93..9ccef3e63c99 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PostgreSqlV2TableDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2TableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The PostgreSQLV2 table dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = PostgreSqlV2TableDataset.class, - visible = true) -@JsonTypeName("PostgreSqlV2Table") @Fluent public final class PostgreSqlV2TableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PostgreSqlV2Table"; /* * PostgreSQLV2 table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private PostgreSqlV2TableDatasetTypeProperties innerTypeProperties; /** @@ -183,4 +175,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PostgreSqlV2TableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PostgreSqlV2TableDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PostgreSqlV2TableDataset. 
+ */ + public static PostgreSqlV2TableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PostgreSqlV2TableDataset deserializedPostgreSqlV2TableDataset = new PostgreSqlV2TableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPostgreSqlV2TableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPostgreSqlV2TableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedPostgreSqlV2TableDataset.innerTypeProperties + = PostgreSqlV2TableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPostgreSqlV2TableDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedPostgreSqlV2TableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySink.java index d8266a347d1f..79aa06e78f0b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySink.java @@ -5,7 +5,10 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Power query sink. @@ -15,7 +18,6 @@ public final class PowerQuerySink extends DataFlowSink { /* * sink script. */ - @JsonProperty(value = "script") private String script; /** @@ -116,4 +118,62 @@ public PowerQuerySink withFlowlet(DataFlowReference flowlet) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeJsonField("dataset", dataset()); + jsonWriter.writeJsonField("linkedService", linkedService()); + jsonWriter.writeJsonField("flowlet", flowlet()); + jsonWriter.writeJsonField("schemaLinkedService", schemaLinkedService()); + jsonWriter.writeJsonField("rejectedDataLinkedService", rejectedDataLinkedService()); + jsonWriter.writeStringField("script", this.script); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PowerQuerySink from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of PowerQuerySink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PowerQuerySink. + */ + public static PowerQuerySink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PowerQuerySink deserializedPowerQuerySink = new PowerQuerySink(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedPowerQuerySink.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedPowerQuerySink.withDescription(reader.getString()); + } else if ("dataset".equals(fieldName)) { + deserializedPowerQuerySink.withDataset(DatasetReference.fromJson(reader)); + } else if ("linkedService".equals(fieldName)) { + deserializedPowerQuerySink.withLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("flowlet".equals(fieldName)) { + deserializedPowerQuerySink.withFlowlet(DataFlowReference.fromJson(reader)); + } else if ("schemaLinkedService".equals(fieldName)) { + deserializedPowerQuerySink.withSchemaLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("rejectedDataLinkedService".equals(fieldName)) { + deserializedPowerQuerySink.withRejectedDataLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("script".equals(fieldName)) { + deserializedPowerQuerySink.script = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPowerQuerySink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySinkMapping.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySinkMapping.java index 6d2c0b1afbd1..9efe19d1ee0d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySinkMapping.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySinkMapping.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Map Power Query mashup query to sink dataset(s). */ @Fluent -public final class PowerQuerySinkMapping { +public final class PowerQuerySinkMapping implements JsonSerializable { /* * Name of the query in Power Query mashup document. */ - @JsonProperty(value = "queryName") private String queryName; /* * List of sinks mapped to Power Query mashup query. */ - @JsonProperty(value = "dataflowSinks") private List dataflowSinks; /** @@ -81,4 +83,44 @@ public void validate() { dataflowSinks().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("queryName", this.queryName); + jsonWriter.writeArrayField("dataflowSinks", this.dataflowSinks, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PowerQuerySinkMapping from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PowerQuerySinkMapping if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the PowerQuerySinkMapping. + */ + public static PowerQuerySinkMapping fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PowerQuerySinkMapping deserializedPowerQuerySinkMapping = new PowerQuerySinkMapping(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("queryName".equals(fieldName)) { + deserializedPowerQuerySinkMapping.queryName = reader.getString(); + } else if ("dataflowSinks".equals(fieldName)) { + List dataflowSinks = reader.readArray(reader1 -> PowerQuerySink.fromJson(reader1)); + deserializedPowerQuerySinkMapping.dataflowSinks = dataflowSinks; + } else { + reader.skipChildren(); + } + } + + return deserializedPowerQuerySinkMapping; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySource.java index afc84d803439..602a46e64923 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PowerQuerySource.java @@ -5,7 +5,10 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Power query source. @@ -15,7 +18,6 @@ public final class PowerQuerySource extends DataFlowSource { /* * source script. 
*/ - @JsonProperty(value = "script") private String script; /** @@ -107,4 +109,59 @@ public PowerQuerySource withFlowlet(DataFlowReference flowlet) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeJsonField("dataset", dataset()); + jsonWriter.writeJsonField("linkedService", linkedService()); + jsonWriter.writeJsonField("flowlet", flowlet()); + jsonWriter.writeJsonField("schemaLinkedService", schemaLinkedService()); + jsonWriter.writeStringField("script", this.script); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PowerQuerySource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PowerQuerySource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PowerQuerySource. 
+ */ + public static PowerQuerySource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PowerQuerySource deserializedPowerQuerySource = new PowerQuerySource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedPowerQuerySource.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedPowerQuerySource.withDescription(reader.getString()); + } else if ("dataset".equals(fieldName)) { + deserializedPowerQuerySource.withDataset(DatasetReference.fromJson(reader)); + } else if ("linkedService".equals(fieldName)) { + deserializedPowerQuerySource.withLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("flowlet".equals(fieldName)) { + deserializedPowerQuerySource.withFlowlet(DataFlowReference.fromJson(reader)); + } else if ("schemaLinkedService".equals(fieldName)) { + deserializedPowerQuerySource.withSchemaLinkedService(LinkedServiceReference.fromJson(reader)); + } else if ("script".equals(fieldName)) { + deserializedPowerQuerySource.script = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPowerQuerySource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoAuthenticationType.java index 7c80222117fe..1ae2157edd06 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public PrestoAuthenticationType() { * @param name a name to look for. * @return the corresponding PrestoAuthenticationType. */ - @JsonCreator public static PrestoAuthenticationType fromString(String name) { return fromString(name, PrestoAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java index 647c4ff28af2..b3e93665cd48 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PrestoLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Presto server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoLinkedService.class, visible = true) -@JsonTypeName("Presto") @Fluent public final class PrestoLinkedService extends LinkedService { /* * Type of linked service. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Presto"; /* * Presto server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private PrestoLinkedServiceTypeProperties innerTypeProperties = new PrestoLinkedServiceTypeProperties(); /** @@ -453,4 +449,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PrestoLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrestoLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrestoLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PrestoLinkedService. 
+ */ + public static PrestoLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrestoLinkedService deserializedPrestoLinkedService = new PrestoLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedPrestoLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPrestoLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPrestoLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPrestoLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedPrestoLinkedService.innerTypeProperties + = PrestoLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedPrestoLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPrestoLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedPrestoLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java index 3e27f4f75948..8b9ca2e3dbc8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PrestoDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Presto server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoObjectDataset.class, visible = true) -@JsonTypeName("PrestoObject") @Fluent public final class PrestoObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PrestoObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private PrestoDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrestoObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrestoObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PrestoObjectDataset. 
+ */ + public static PrestoObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrestoObjectDataset deserializedPrestoObjectDataset = new PrestoObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedPrestoObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedPrestoObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedPrestoObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedPrestoObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedPrestoObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedPrestoObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedPrestoObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedPrestoObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedPrestoObjectDataset.innerTypeProperties = PrestoDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPrestoObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedPrestoObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java index 2f26291fc66b..39c60f2c2657 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrestoSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Presto server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = PrestoSource.class, visible = true) -@JsonTypeName("PrestoSource") @Fluent public final class PrestoSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "PrestoSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public PrestoSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrestoSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrestoSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PrestoSource. 
+ */ + public static PrestoSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrestoSource deserializedPrestoSource = new PrestoSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedPrestoSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedPrestoSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedPrestoSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedPrestoSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedPrestoSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedPrestoSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedPrestoSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedPrestoSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedPrestoSource.withAdditionalProperties(additionalProperties); + + return deserializedPrestoSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpoint.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpoint.java index f78cc43854d7..fb088f26165c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpoint.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpoint.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Private endpoint which a connection belongs to. */ @Fluent -public final class PrivateEndpoint { +public final class PrivateEndpoint implements JsonSerializable { /* * The resource Id for private endpoint */ - @JsonProperty(value = "id") private String id; /** @@ -51,4 +54,40 @@ public PrivateEndpoint withId(String id) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", this.id); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateEndpoint from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateEndpoint if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateEndpoint. 
+ */ + public static PrivateEndpoint fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateEndpoint deserializedPrivateEndpoint = new PrivateEndpoint(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedPrivateEndpoint.id = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateEndpoint; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java index 7dedcf15399e..99217e894786 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateEndpointConnectionListResponse.java @@ -6,25 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PrivateEndpointConnectionResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of linked service resources. */ @Fluent -public final class PrivateEndpointConnectionListResponse { +public final class PrivateEndpointConnectionListResponse + implements JsonSerializable { /* * List of Private Endpoint Connections. 
*/ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -89,4 +92,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(PrivateEndpointConnectionListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateEndpointConnectionListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateEndpointConnectionListResponse if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the PrivateEndpointConnectionListResponse. 
+ */ + public static PrivateEndpointConnectionListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateEndpointConnectionListResponse deserializedPrivateEndpointConnectionListResponse + = new PrivateEndpointConnectionListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> PrivateEndpointConnectionResourceInner.fromJson(reader1)); + deserializedPrivateEndpointConnectionListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedPrivateEndpointConnectionListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateEndpointConnectionListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequest.java index d8db2e0c6ffd..798e273f3e50 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequest.java @@ -5,23 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A request to approve or reject a private endpoint connection. 
*/ @Fluent -public final class PrivateLinkConnectionApprovalRequest { +public final class PrivateLinkConnectionApprovalRequest + implements JsonSerializable { /* * The state of a private link connection */ - @JsonProperty(value = "privateLinkServiceConnectionState") private PrivateLinkConnectionState privateLinkServiceConnectionState; /* * The resource of private endpoint. */ - @JsonProperty(value = "privateEndpoint") private PrivateEndpoint privateEndpoint; /** @@ -84,4 +87,45 @@ public void validate() { privateEndpoint().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("privateLinkServiceConnectionState", this.privateLinkServiceConnectionState); + jsonWriter.writeJsonField("privateEndpoint", this.privateEndpoint); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkConnectionApprovalRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkConnectionApprovalRequest if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateLinkConnectionApprovalRequest. 
+ */ + public static PrivateLinkConnectionApprovalRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkConnectionApprovalRequest deserializedPrivateLinkConnectionApprovalRequest + = new PrivateLinkConnectionApprovalRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("privateLinkServiceConnectionState".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequest.privateLinkServiceConnectionState + = PrivateLinkConnectionState.fromJson(reader); + } else if ("privateEndpoint".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequest.privateEndpoint = PrivateEndpoint.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkConnectionApprovalRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequestResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequestResource.java index c3a7e8c26725..7702bcee90b3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequestResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionApprovalRequestResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Private Endpoint Connection Approval ARM resource. 
@@ -16,25 +19,21 @@ public final class PrivateLinkConnectionApprovalRequestResource extends SubResou /* * Core resource properties */ - @JsonProperty(value = "properties") private PrivateLinkConnectionApprovalRequest properties; /* * The resource name. */ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -110,4 +109,51 @@ public void validate() { properties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkConnectionApprovalRequestResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkConnectionApprovalRequestResource if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateLinkConnectionApprovalRequestResource. 
+ */ + public static PrivateLinkConnectionApprovalRequestResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkConnectionApprovalRequestResource deserializedPrivateLinkConnectionApprovalRequestResource + = new PrivateLinkConnectionApprovalRequestResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequestResource.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequestResource.properties + = PrivateLinkConnectionApprovalRequest.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequestResource.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequestResource.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedPrivateLinkConnectionApprovalRequestResource.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkConnectionApprovalRequestResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionState.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionState.java index dda42f806072..f4a8a01e9620 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionState.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkConnectionState.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import 
com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The state of a private link connection. */ @Fluent -public final class PrivateLinkConnectionState { +public final class PrivateLinkConnectionState implements JsonSerializable { /* * Status of a private link connection */ - @JsonProperty(value = "status") private String status; /* * Description of a private link connection */ - @JsonProperty(value = "description") private String description; /* * ActionsRequired for a private link connection */ - @JsonProperty(value = "actionsRequired") private String actionsRequired; /** @@ -103,4 +104,46 @@ public PrivateLinkConnectionState withActionsRequired(String actionsRequired) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("status", this.status); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeStringField("actionsRequired", this.actionsRequired); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkConnectionState from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkConnectionState if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateLinkConnectionState. 
+ */ + public static PrivateLinkConnectionState fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkConnectionState deserializedPrivateLinkConnectionState = new PrivateLinkConnectionState(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("status".equals(fieldName)) { + deserializedPrivateLinkConnectionState.status = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedPrivateLinkConnectionState.description = reader.getString(); + } else if ("actionsRequired".equals(fieldName)) { + deserializedPrivateLinkConnectionState.actionsRequired = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkConnectionState; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResource.java index 1c8433a6d27a..6e8100a0b3c7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResource.java @@ -6,7 +6,10 @@ import com.azure.core.annotation.Fluent; import com.azure.core.management.SubResource; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A private link resource. @@ -16,25 +19,21 @@ public final class PrivateLinkResource extends SubResource { /* * Core resource properties */ - @JsonProperty(value = "properties") private PrivateLinkResourceProperties properties; /* * The resource name. 
*/ - @JsonProperty(value = "name", access = JsonProperty.Access.WRITE_ONLY) private String name; /* * The resource type. */ - @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY) private String type; /* * Etag identifies change in the resource. */ - @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY) private String etag; /** @@ -109,4 +108,49 @@ public void validate() { properties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("id", id()); + jsonWriter.writeJsonField("properties", this.properties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkResource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateLinkResource. 
+ */ + public static PrivateLinkResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkResource deserializedPrivateLinkResource = new PrivateLinkResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedPrivateLinkResource.withId(reader.getString()); + } else if ("properties".equals(fieldName)) { + deserializedPrivateLinkResource.properties = PrivateLinkResourceProperties.fromJson(reader); + } else if ("name".equals(fieldName)) { + deserializedPrivateLinkResource.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedPrivateLinkResource.type = reader.getString(); + } else if ("etag".equals(fieldName)) { + deserializedPrivateLinkResource.etag = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResourceProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResourceProperties.java index 67fec5de27e5..6e973b7a1b7f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResourceProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PrivateLinkResourceProperties.java @@ -5,30 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** 
* Properties of a private link resource. */ @Immutable -public final class PrivateLinkResourceProperties { +public final class PrivateLinkResourceProperties implements JsonSerializable { /* * GroupId of a private link resource */ - @JsonProperty(value = "groupId", access = JsonProperty.Access.WRITE_ONLY) private String groupId; /* * RequiredMembers of a private link resource */ - @JsonProperty(value = "requiredMembers", access = JsonProperty.Access.WRITE_ONLY) private List requiredMembers; /* * RequiredZoneNames of a private link resource */ - @JsonProperty(value = "requiredZoneNames", access = JsonProperty.Access.WRITE_ONLY) private List requiredZoneNames; /** @@ -71,4 +72,46 @@ public List requiredZoneNames() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PrivateLinkResourceProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PrivateLinkResourceProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the PrivateLinkResourceProperties. 
+ */ + public static PrivateLinkResourceProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PrivateLinkResourceProperties deserializedPrivateLinkResourceProperties + = new PrivateLinkResourceProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("groupId".equals(fieldName)) { + deserializedPrivateLinkResourceProperties.groupId = reader.getString(); + } else if ("requiredMembers".equals(fieldName)) { + List requiredMembers = reader.readArray(reader1 -> reader1.getString()); + deserializedPrivateLinkResourceProperties.requiredMembers = requiredMembers; + } else if ("requiredZoneNames".equals(fieldName)) { + List requiredZoneNames = reader.readArray(reader1 -> reader1.getString()); + deserializedPrivateLinkResourceProperties.requiredZoneNames = requiredZoneNames; + } else { + reader.skipChildren(); + } + } + + return deserializedPrivateLinkResourceProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PublicNetworkAccess.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PublicNetworkAccess.java index 6a5254527328..d53cf822972c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PublicNetworkAccess.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PublicNetworkAccess.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public PublicNetworkAccess() { * @param name a name to look for. * @return the corresponding PublicNetworkAccess. 
*/ - @JsonCreator public static PublicNetworkAccess fromString(String name) { return fromString(name, PublicNetworkAccess.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PurviewConfiguration.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PurviewConfiguration.java index e622b0f87783..0f173d73792c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PurviewConfiguration.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/PurviewConfiguration.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Purview configuration. */ @Fluent -public final class PurviewConfiguration { +public final class PurviewConfiguration implements JsonSerializable { /* * Purview resource id. */ - @JsonProperty(value = "purviewResourceId") private String purviewResourceId; /** @@ -51,4 +54,40 @@ public PurviewConfiguration withPurviewResourceId(String purviewResourceId) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("purviewResourceId", this.purviewResourceId); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PurviewConfiguration from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of PurviewConfiguration if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the PurviewConfiguration. + */ + public static PurviewConfiguration fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PurviewConfiguration deserializedPurviewConfiguration = new PurviewConfiguration(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("purviewResourceId".equals(fieldName)) { + deserializedPurviewConfiguration.purviewResourceId = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedPurviewConfiguration; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QueryDataFlowDebugSessionsResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QueryDataFlowDebugSessionsResponse.java index 29336dac966a..9dd87b454ceb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QueryDataFlowDebugSessionsResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QueryDataFlowDebugSessionsResponse.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugSessionInfoInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of active debug sessions. 
*/ @Fluent -public final class QueryDataFlowDebugSessionsResponse { +public final class QueryDataFlowDebugSessionsResponse implements JsonSerializable { /* * Array with all active debug sessions. */ - @JsonProperty(value = "value") private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -82,4 +84,46 @@ public void validate() { value().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QueryDataFlowDebugSessionsResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QueryDataFlowDebugSessionsResponse if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the QueryDataFlowDebugSessionsResponse. 
+ */ + public static QueryDataFlowDebugSessionsResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QueryDataFlowDebugSessionsResponse deserializedQueryDataFlowDebugSessionsResponse + = new QueryDataFlowDebugSessionsResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> DataFlowDebugSessionInfoInner.fromJson(reader1)); + deserializedQueryDataFlowDebugSessionsResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedQueryDataFlowDebugSessionsResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedQueryDataFlowDebugSessionsResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java index e93459019e52..53268ee57ad9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.QuickBooksLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import 
java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * QuickBooks server linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = QuickBooksLinkedService.class, - visible = true) -@JsonTypeName("QuickBooks") @Fluent public final class QuickBooksLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "QuickBooks"; /* * QuickBooks server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private QuickBooksLinkedServiceTypeProperties innerTypeProperties = new QuickBooksLinkedServiceTypeProperties(); /** @@ -330,4 +322,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(QuickBooksLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickBooksLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of QuickBooksLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the QuickBooksLinkedService. + */ + public static QuickBooksLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickBooksLinkedService deserializedQuickBooksLinkedService = new QuickBooksLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedQuickBooksLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedQuickBooksLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedQuickBooksLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedQuickBooksLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedQuickBooksLinkedService.innerTypeProperties + = QuickBooksLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedQuickBooksLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedQuickBooksLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedQuickBooksLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java index f6c9251fd867..a9323b16681d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * QuickBooks server dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = QuickBooksObjectDataset.class, - visible = true) -@JsonTypeName("QuickBooksObject") @Fluent public final class QuickBooksObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "QuickBooksObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickBooksObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QuickBooksObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the QuickBooksObjectDataset. 
+ */ + public static QuickBooksObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickBooksObjectDataset deserializedQuickBooksObjectDataset = new QuickBooksObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedQuickBooksObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedQuickBooksObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedQuickBooksObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedQuickBooksObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedQuickBooksObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedQuickBooksObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedQuickBooksObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedQuickBooksObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedQuickBooksObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedQuickBooksObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedQuickBooksObjectDataset; + }); + 
} } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java index 13e4eefbd661..803c70159b80 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickBooksSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity QuickBooks server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = QuickBooksSource.class, visible = true) -@JsonTypeName("QuickBooksSource") @Fluent public final class QuickBooksSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "QuickBooksSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public QuickBooksSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickBooksSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QuickBooksSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the QuickBooksSource. 
+ */ + public static QuickBooksSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickBooksSource deserializedQuickBooksSource = new QuickBooksSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedQuickBooksSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedQuickBooksSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedQuickBooksSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedQuickBooksSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedQuickBooksSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedQuickBooksSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedQuickBooksSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedQuickBooksSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedQuickBooksSource.withAdditionalProperties(additionalProperties); + + return deserializedQuickBooksSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java index 92b0747d8ab4..a1dfdd1ceed8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/QuickbaseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.QuickbaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Quickbase. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = QuickbaseLinkedService.class, visible = true) -@JsonTypeName("Quickbase") @Fluent public final class QuickbaseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Quickbase"; /* * Quickbase linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private QuickbaseLinkedServiceTypeProperties innerTypeProperties = new QuickbaseLinkedServiceTypeProperties(); /** @@ -184,4 +180,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(QuickbaseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of QuickbaseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of QuickbaseLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the QuickbaseLinkedService. 
+ */ + public static QuickbaseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + QuickbaseLinkedService deserializedQuickbaseLinkedService = new QuickbaseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedQuickbaseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedQuickbaseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedQuickbaseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedQuickbaseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedQuickbaseLinkedService.innerTypeProperties + = QuickbaseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedQuickbaseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedQuickbaseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedQuickbaseLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceFrequency.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceFrequency.java index ee23247fb034..f80d7d1cfca6 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceFrequency.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceFrequency.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -62,7 +61,6 @@ public RecurrenceFrequency() { * @param name a name to look for. * @return the corresponding RecurrenceFrequency. */ - @JsonCreator public static RecurrenceFrequency fromString(String name) { return fromString(name, RecurrenceFrequency.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceSchedule.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceSchedule.java index 43385048eed1..f87b9e8ecc25 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceSchedule.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceSchedule.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -17,41 +18,35 @@ * The recurrence schedule. 
*/ @Fluent -public final class RecurrenceSchedule { +public final class RecurrenceSchedule implements JsonSerializable { /* * The minutes. */ - @JsonProperty(value = "minutes") private List minutes; /* * The hours. */ - @JsonProperty(value = "hours") private List hours; /* * The days of the week. */ - @JsonProperty(value = "weekDays") private List weekDays; /* * The month days. */ - @JsonProperty(value = "monthDays") private List monthDays; /* * The monthly occurrences. */ - @JsonProperty(value = "monthlyOccurrences") private List monthlyOccurrences; /* * The recurrence schedule. */ - @JsonIgnore private Map additionalProperties; /** @@ -165,7 +160,6 @@ public RecurrenceSchedule withMonthlyOccurrences(List additionalProperties() { return this.additionalProperties; } @@ -181,14 +175,6 @@ public RecurrenceSchedule withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -199,4 +185,71 @@ public void validate() { monthlyOccurrences().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("minutes", this.minutes, (writer, element) -> writer.writeInt(element)); + jsonWriter.writeArrayField("hours", this.hours, (writer, element) -> writer.writeInt(element)); + jsonWriter.writeArrayField("weekDays", this.weekDays, + (writer, element) -> writer.writeString(element == null ? 
null : element.toString())); + jsonWriter.writeArrayField("monthDays", this.monthDays, (writer, element) -> writer.writeInt(element)); + jsonWriter.writeArrayField("monthlyOccurrences", this.monthlyOccurrences, + (writer, element) -> writer.writeJson(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RecurrenceSchedule from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RecurrenceSchedule if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the RecurrenceSchedule. + */ + public static RecurrenceSchedule fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RecurrenceSchedule deserializedRecurrenceSchedule = new RecurrenceSchedule(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("minutes".equals(fieldName)) { + List minutes = reader.readArray(reader1 -> reader1.getInt()); + deserializedRecurrenceSchedule.minutes = minutes; + } else if ("hours".equals(fieldName)) { + List hours = reader.readArray(reader1 -> reader1.getInt()); + deserializedRecurrenceSchedule.hours = hours; + } else if ("weekDays".equals(fieldName)) { + List weekDays = reader.readArray(reader1 -> DaysOfWeek.fromString(reader1.getString())); + deserializedRecurrenceSchedule.weekDays = weekDays; + } else if ("monthDays".equals(fieldName)) { + List monthDays = reader.readArray(reader1 -> reader1.getInt()); + deserializedRecurrenceSchedule.monthDays = monthDays; + } else if ("monthlyOccurrences".equals(fieldName)) { + List monthlyOccurrences + = 
reader.readArray(reader1 -> RecurrenceScheduleOccurrence.fromJson(reader1)); + deserializedRecurrenceSchedule.monthlyOccurrences = monthlyOccurrences; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRecurrenceSchedule.additionalProperties = additionalProperties; + + return deserializedRecurrenceSchedule; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceScheduleOccurrence.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceScheduleOccurrence.java index 5fd9ea43fb4f..4eb65da38413 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceScheduleOccurrence.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RecurrenceScheduleOccurrence.java @@ -5,10 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -16,23 +17,20 @@ * The recurrence schedule occurrence. */ @Fluent -public final class RecurrenceScheduleOccurrence { +public final class RecurrenceScheduleOccurrence implements JsonSerializable { /* * The day of the week. */ - @JsonProperty(value = "day") private DayOfWeek day; /* * The occurrence. 
*/ - @JsonProperty(value = "occurrence") private Integer occurrence; /* * The recurrence schedule occurrence. */ - @JsonIgnore private Map additionalProperties; /** @@ -86,7 +84,6 @@ public RecurrenceScheduleOccurrence withOccurrence(Integer occurrence) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -102,14 +99,6 @@ public RecurrenceScheduleOccurrence withAdditionalProperties(Map return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -117,4 +106,54 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("day", this.day == null ? null : this.day.toString()); + jsonWriter.writeNumberField("occurrence", this.occurrence); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RecurrenceScheduleOccurrence from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RecurrenceScheduleOccurrence if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the RecurrenceScheduleOccurrence. 
+ */ + public static RecurrenceScheduleOccurrence fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RecurrenceScheduleOccurrence deserializedRecurrenceScheduleOccurrence = new RecurrenceScheduleOccurrence(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("day".equals(fieldName)) { + deserializedRecurrenceScheduleOccurrence.day = DayOfWeek.fromString(reader.getString()); + } else if ("occurrence".equals(fieldName)) { + deserializedRecurrenceScheduleOccurrence.occurrence = reader.getNullable(JsonReader::getInt); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRecurrenceScheduleOccurrence.additionalProperties = additionalProperties; + + return deserializedRecurrenceScheduleOccurrence; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java index d128fadbcbf0..e2fda111712c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedirectIncompatibleRowSettings.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -17,25 +18,22 @@ * Redirect incompatible row settings. */ @Fluent -public final class RedirectIncompatibleRowSettings { +public final class RedirectIncompatibleRowSettings implements JsonSerializable { /* * Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible * row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "linkedServiceName", required = true) private Object linkedServiceName; /* * The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "path") private Object path; /* * Redirect incompatible row settings */ - @JsonIgnore private Map additionalProperties; /** @@ -95,7 +93,6 @@ public RedirectIncompatibleRowSettings withPath(Object path) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -111,14 +108,6 @@ public RedirectIncompatibleRowSettings withAdditionalProperties(Map(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -133,4 +122,56 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RedirectIncompatibleRowSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeUntypedField("path", this.path); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RedirectIncompatibleRowSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RedirectIncompatibleRowSettings if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RedirectIncompatibleRowSettings. 
+ */ + public static RedirectIncompatibleRowSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RedirectIncompatibleRowSettings deserializedRedirectIncompatibleRowSettings + = new RedirectIncompatibleRowSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedRedirectIncompatibleRowSettings.linkedServiceName = reader.readUntyped(); + } else if ("path".equals(fieldName)) { + deserializedRedirectIncompatibleRowSettings.path = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRedirectIncompatibleRowSettings.additionalProperties = additionalProperties; + + return deserializedRedirectIncompatibleRowSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java index 1ddf52339858..41d71e658cee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RedshiftUnloadSettings.java @@ -6,7 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The Amazon S3 settings needed 
for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, @@ -14,12 +18,11 @@ * interim S3. */ @Fluent -public final class RedshiftUnloadSettings { +public final class RedshiftUnloadSettings implements JsonSerializable { /* * The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon * Redshift source. */ - @JsonProperty(value = "s3LinkedServiceName", required = true) private LinkedServiceReference s3LinkedServiceName; /* @@ -27,7 +30,6 @@ public final class RedshiftUnloadSettings { * The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "bucketName", required = true) private Object bucketName; /** @@ -103,4 +105,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RedshiftUnloadSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("s3LinkedServiceName", this.s3LinkedServiceName); + jsonWriter.writeUntypedField("bucketName", this.bucketName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RedshiftUnloadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RedshiftUnloadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RedshiftUnloadSettings. 
+ */ + public static RedshiftUnloadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RedshiftUnloadSettings deserializedRedshiftUnloadSettings = new RedshiftUnloadSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("s3LinkedServiceName".equals(fieldName)) { + deserializedRedshiftUnloadSettings.s3LinkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("bucketName".equals(fieldName)) { + deserializedRedshiftUnloadSettings.bucketName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedRedshiftUnloadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java index 407e5fd45cf4..edd662aadf3f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for various relational databases. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RelationalSource.class, visible = true) -@JsonTypeName("RelationalSource") @Fluent public final class RelationalSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RelationalSource"; /* * Database query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -140,4 +136,69 @@ public RelationalSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RelationalSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of RelationalSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the RelationalSource. + */ + public static RelationalSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RelationalSource deserializedRelationalSource = new RelationalSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedRelationalSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedRelationalSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedRelationalSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedRelationalSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedRelationalSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedRelationalSource.query = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedRelationalSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRelationalSource.withAdditionalProperties(additionalProperties); + + return deserializedRelationalSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java index 82dd0bbc1159..25bb06e02755 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RelationalTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.RelationalTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The relational table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RelationalTableDataset.class, visible = true) -@JsonTypeName("RelationalTable") @Fluent public final class RelationalTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RelationalTable"; /* * Relational table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private RelationalTableDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RelationalTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RelationalTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RelationalTableDataset. 
+ */ + public static RelationalTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RelationalTableDataset deserializedRelationalTableDataset = new RelationalTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedRelationalTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedRelationalTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedRelationalTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedRelationalTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedRelationalTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedRelationalTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedRelationalTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedRelationalTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedRelationalTableDataset.innerTypeProperties + = RelationalTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRelationalTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedRelationalTableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RemotePrivateEndpointConnection.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RemotePrivateEndpointConnection.java index e0d5231d7c7b..24804028019a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RemotePrivateEndpointConnection.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RemotePrivateEndpointConnection.java @@ -5,29 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A remote private endpoint connection. */ @Fluent -public final class RemotePrivateEndpointConnection { +public final class RemotePrivateEndpointConnection implements JsonSerializable { /* * The provisioningState property. 
*/ - @JsonProperty(value = "provisioningState", access = JsonProperty.Access.WRITE_ONLY) private String provisioningState; /* * PrivateEndpoint of a remote private endpoint connection */ - @JsonProperty(value = "privateEndpoint") private ArmIdWrapper privateEndpoint; /* * The state of a private link connection */ - @JsonProperty(value = "privateLinkServiceConnectionState") private PrivateLinkConnectionState privateLinkServiceConnectionState; /** @@ -99,4 +100,47 @@ public void validate() { privateLinkServiceConnectionState().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("privateEndpoint", this.privateEndpoint); + jsonWriter.writeJsonField("privateLinkServiceConnectionState", this.privateLinkServiceConnectionState); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RemotePrivateEndpointConnection from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RemotePrivateEndpointConnection if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the RemotePrivateEndpointConnection. 
+ */ + public static RemotePrivateEndpointConnection fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RemotePrivateEndpointConnection deserializedRemotePrivateEndpointConnection + = new RemotePrivateEndpointConnection(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("provisioningState".equals(fieldName)) { + deserializedRemotePrivateEndpointConnection.provisioningState = reader.getString(); + } else if ("privateEndpoint".equals(fieldName)) { + deserializedRemotePrivateEndpointConnection.privateEndpoint = ArmIdWrapper.fromJson(reader); + } else if ("privateLinkServiceConnectionState".equals(fieldName)) { + deserializedRemotePrivateEndpointConnection.privateLinkServiceConnectionState + = PrivateLinkConnectionState.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedRemotePrivateEndpointConnection; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java index 4830c4200a4f..34e213bee683 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RerunTumblingWindowTrigger.java @@ -6,40 +6,38 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.RerunTumblingWindowTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.time.OffsetDateTime; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested * end time. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = RerunTumblingWindowTrigger.class, - visible = true) -@JsonTypeName("RerunTumblingWindowTrigger") @Fluent public final class RerunTumblingWindowTrigger extends Trigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RerunTumblingWindowTrigger"; /* * Rerun Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private RerunTumblingWindowTriggerTypeProperties innerTypeProperties = new RerunTumblingWindowTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of RerunTumblingWindowTrigger class. */ @@ -65,6 +63,17 @@ private RerunTumblingWindowTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -199,4 +208,66 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RerunTumblingWindowTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RerunTumblingWindowTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RerunTumblingWindowTrigger if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RerunTumblingWindowTrigger. 
+ */ + public static RerunTumblingWindowTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RerunTumblingWindowTrigger deserializedRerunTumblingWindowTrigger = new RerunTumblingWindowTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedRerunTumblingWindowTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedRerunTumblingWindowTrigger.runtimeState + = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedRerunTumblingWindowTrigger.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedRerunTumblingWindowTrigger.innerTypeProperties + = RerunTumblingWindowTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedRerunTumblingWindowTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRerunTumblingWindowTrigger.withAdditionalProperties(additionalProperties); + + return deserializedRerunTumblingWindowTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java index 1dab3b3f7d8e..f987ff9e685e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ResponsysLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Responsys linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysLinkedService.class, visible = true) -@JsonTypeName("Responsys") @Fluent public final class ResponsysLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Responsys"; /* * Responsys linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private ResponsysLinkedServiceTypeProperties innerTypeProperties = new ResponsysLinkedServiceTypeProperties(); /** @@ -288,4 +284,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ResponsysLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ResponsysLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ResponsysLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ResponsysLinkedService. 
+ */ + public static ResponsysLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ResponsysLinkedService deserializedResponsysLinkedService = new ResponsysLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedResponsysLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedResponsysLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedResponsysLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedResponsysLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedResponsysLinkedService.innerTypeProperties + = ResponsysLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedResponsysLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedResponsysLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedResponsysLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java index 1ddd04e3b5d4..93cbf7e22513 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Responsys dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysObjectDataset.class, visible = true) -@JsonTypeName("ResponsysObject") @Fluent public final class ResponsysObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ResponsysObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ResponsysObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ResponsysObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ResponsysObjectDataset. 
+ */ + public static ResponsysObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ResponsysObjectDataset deserializedResponsysObjectDataset = new ResponsysObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedResponsysObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedResponsysObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedResponsysObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedResponsysObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedResponsysObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedResponsysObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedResponsysObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedResponsysObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedResponsysObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedResponsysObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedResponsysObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java index 9d39e963d92c..2a82ff37eefd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ResponsysSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Responsys source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ResponsysSource.class, visible = true) -@JsonTypeName("ResponsysSource") @Fluent public final class ResponsysSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ResponsysSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ResponsysSource withDisableMetricsCollection(Object disableMetricsCollect public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ResponsysSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ResponsysSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ResponsysSource. 
+ */ + public static ResponsysSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ResponsysSource deserializedResponsysSource = new ResponsysSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedResponsysSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedResponsysSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedResponsysSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedResponsysSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedResponsysSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedResponsysSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedResponsysSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedResponsysSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedResponsysSource.withAdditionalProperties(additionalProperties); + + return deserializedResponsysSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java index bd23de94a7eb..4f52fdb0cbff 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestResourceDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.RestResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * A Rest service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestResourceDataset.class, visible = true) -@JsonTypeName("RestResource") @Fluent public final class RestResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RestResource"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private RestResourceDatasetTypeProperties innerTypeProperties; /** @@ -254,4 +250,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestResourceDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RestResourceDataset. 
+ */ + public static RestResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestResourceDataset deserializedRestResourceDataset = new RestResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedRestResourceDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedRestResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedRestResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedRestResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedRestResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedRestResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedRestResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedRestResourceDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedRestResourceDataset.innerTypeProperties + = RestResourceDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRestResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedRestResourceDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceAuthenticationType.java index 91a97a5afaa2..9ca30973a25f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -52,7 +51,6 @@ public RestServiceAuthenticationType() { * @param name a name to look for. * @return the corresponding RestServiceAuthenticationType. */ - @JsonCreator public static RestServiceAuthenticationType fromString(String name) { return fromString(name, RestServiceAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java index 6bfa27877988..4745ab4a451f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestServiceLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import 
com.azure.resourcemanager.datafactory.fluent.models.RestServiceLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Rest Service linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = RestServiceLinkedService.class, - visible = true) -@JsonTypeName("RestService") @Fluent public final class RestServiceLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RestService"; /* * Rest Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private RestServiceLinkedServiceTypeProperties innerTypeProperties = new RestServiceLinkedServiceTypeProperties(); /** @@ -559,4 +551,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RestServiceLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), 
additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestServiceLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestServiceLinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RestServiceLinkedService. + */ + public static RestServiceLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestServiceLinkedService deserializedRestServiceLinkedService = new RestServiceLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedRestServiceLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedRestServiceLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedRestServiceLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedRestServiceLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedRestServiceLinkedService.innerTypeProperties + = RestServiceLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedRestServiceLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRestServiceLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedRestServiceLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java index 4b8ff3fe4fc3..f61605ed0875 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSink.java @@ -5,37 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Rest service Sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestSink.class, visible = true) -@JsonTypeName("RestSink") @Fluent public final class RestSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RestSink"; /* * The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestMethod") private Object requestMethod; /* * The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string * type). 
*/ - @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* @@ -43,20 +39,17 @@ public final class RestSink extends CopySink { * response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* * The time to await before sending next request, in milliseconds */ - @JsonProperty(value = "requestInterval") private Object requestInterval; /* * Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The * Only Supported option is Gzip. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "httpCompressionType") private Object httpCompressionType; /** @@ -250,4 +243,84 @@ public RestSink withDisableMetricsCollection(Object disableMetricsCollection) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("requestMethod", this.requestMethod); + jsonWriter.writeUntypedField("additionalHeaders", this.additionalHeaders); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + jsonWriter.writeUntypedField("requestInterval", this.requestInterval); + jsonWriter.writeUntypedField("httpCompressionType", 
this.httpCompressionType); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the RestSink. + */ + public static RestSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestSink deserializedRestSink = new RestSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedRestSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedRestSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedRestSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedRestSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedRestSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedRestSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedRestSink.type = reader.getString(); + } else if ("requestMethod".equals(fieldName)) { + deserializedRestSink.requestMethod = reader.readUntyped(); + } else if ("additionalHeaders".equals(fieldName)) { + deserializedRestSink.additionalHeaders = reader.readUntyped(); + } 
else if ("httpRequestTimeout".equals(fieldName)) { + deserializedRestSink.httpRequestTimeout = reader.readUntyped(); + } else if ("requestInterval".equals(fieldName)) { + deserializedRestSink.requestInterval = reader.readUntyped(); + } else if ("httpCompressionType".equals(fieldName)) { + deserializedRestSink.httpCompressionType = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedRestSink.withAdditionalProperties(additionalProperties); + + return deserializedRestSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java index 0896659905fd..eca78cd0ddc4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RestSource.java @@ -5,50 +5,44 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Rest service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RestSource.class, visible = true) -@JsonTypeName("RestSource") @Fluent public final class RestSource extends CopySource { /* * Copy source type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "RestSource"; /* * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestMethod") private Object requestMethod; /* * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "requestBody") private Object requestBody; /* * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "additionalHeaders") private Object additionalHeaders; /* * The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "paginationRules") private Object paginationRules; /* @@ -56,19 +50,16 @@ public final class RestSource extends CopySource { * response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /* * The time to await before sending next page request. */ - @JsonProperty(value = "requestInterval") private Object requestInterval; /* * Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -286,4 +277,84 @@ public RestSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("requestMethod", this.requestMethod); + jsonWriter.writeUntypedField("requestBody", this.requestBody); + jsonWriter.writeUntypedField("additionalHeaders", this.additionalHeaders); + jsonWriter.writeUntypedField("paginationRules", this.paginationRules); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + jsonWriter.writeUntypedField("requestInterval", this.requestInterval); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RestSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RestSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the RestSource. 
+ */ + public static RestSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RestSource deserializedRestSource = new RestSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedRestSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedRestSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedRestSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedRestSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedRestSource.type = reader.getString(); + } else if ("requestMethod".equals(fieldName)) { + deserializedRestSource.requestMethod = reader.readUntyped(); + } else if ("requestBody".equals(fieldName)) { + deserializedRestSource.requestBody = reader.readUntyped(); + } else if ("additionalHeaders".equals(fieldName)) { + deserializedRestSource.additionalHeaders = reader.readUntyped(); + } else if ("paginationRules".equals(fieldName)) { + deserializedRestSource.paginationRules = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedRestSource.httpRequestTimeout = reader.readUntyped(); + } else if ("requestInterval".equals(fieldName)) { + deserializedRestSource.requestInterval = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedRestSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedRestSource.withAdditionalProperties(additionalProperties); + + return deserializedRestSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java index 790f21bba951..efdc45a815dd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RetryPolicy.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Execution policy for an activity. */ @Fluent -public final class RetryPolicy { +public final class RetryPolicy implements JsonSerializable { /* * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. */ - @JsonProperty(value = "count") private Object count; /* * Interval between retries in seconds. Default is 30. 
*/ - @JsonProperty(value = "intervalInSeconds") private Integer intervalInSeconds; /** @@ -79,4 +81,43 @@ public RetryPolicy withIntervalInSeconds(Integer intervalInSeconds) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("count", this.count); + jsonWriter.writeNumberField("intervalInSeconds", this.intervalInSeconds); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RetryPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RetryPolicy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the RetryPolicy. + */ + public static RetryPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RetryPolicy deserializedRetryPolicy = new RetryPolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("count".equals(fieldName)) { + deserializedRetryPolicy.count = reader.readUntyped(); + } else if ("intervalInSeconds".equals(fieldName)) { + deserializedRetryPolicy.intervalInSeconds = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedRetryPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java index 5561c49e135f..1e255c399978 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunFilterParameters.java @@ -5,44 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.core.util.CoreUtils; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; import java.util.List; /** * Query parameters for listing runs. */ @Fluent -public final class RunFilterParameters { +public final class RunFilterParameters implements JsonSerializable { /* * The continuation token for getting the next page of results. Null for first page. */ - @JsonProperty(value = "continuationToken") private String continuationToken; /* * The time at or after which the run event was updated in 'ISO 8601' format. */ - @JsonProperty(value = "lastUpdatedAfter", required = true) private OffsetDateTime lastUpdatedAfter; /* * The time at or before which the run event was updated in 'ISO 8601' format. */ - @JsonProperty(value = "lastUpdatedBefore", required = true) private OffsetDateTime lastUpdatedBefore; /* * List of filters. */ - @JsonProperty(value = "filters") private List filters; /* * List of OrderBy option. */ - @JsonProperty(value = "orderBy") private List orderBy; /** @@ -178,4 +179,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RunFilterParameters.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("lastUpdatedAfter", + this.lastUpdatedAfter == null + ? 
null + : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.lastUpdatedAfter)); + jsonWriter.writeStringField("lastUpdatedBefore", + this.lastUpdatedBefore == null + ? null + : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.lastUpdatedBefore)); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + jsonWriter.writeArrayField("filters", this.filters, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("orderBy", this.orderBy, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RunFilterParameters from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RunFilterParameters if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RunFilterParameters. 
+ */ + public static RunFilterParameters fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RunFilterParameters deserializedRunFilterParameters = new RunFilterParameters(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("lastUpdatedAfter".equals(fieldName)) { + deserializedRunFilterParameters.lastUpdatedAfter = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("lastUpdatedBefore".equals(fieldName)) { + deserializedRunFilterParameters.lastUpdatedBefore = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("continuationToken".equals(fieldName)) { + deserializedRunFilterParameters.continuationToken = reader.getString(); + } else if ("filters".equals(fieldName)) { + List filters = reader.readArray(reader1 -> RunQueryFilter.fromJson(reader1)); + deserializedRunFilterParameters.filters = filters; + } else if ("orderBy".equals(fieldName)) { + List orderBy = reader.readArray(reader1 -> RunQueryOrderBy.fromJson(reader1)); + deserializedRunFilterParameters.orderBy = orderBy; + } else { + reader.skipChildren(); + } + } + + return deserializedRunFilterParameters; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java index 6adae1568444..01f82d13f86b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilter.java @@ -6,32 +6,33 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Query filter option for listing runs. */ @Fluent -public final class RunQueryFilter { +public final class RunQueryFilter implements JsonSerializable { /* * Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, * RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and * Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. */ - @JsonProperty(value = "operand", required = true) private RunQueryFilterOperand operand; /* * Operator to be used for filter. */ - @JsonProperty(value = "operator", required = true) private RunQueryFilterOperator operator; /* * List of filter values. */ - @JsonProperty(value = "values", required = true) private List values; /** @@ -127,4 +128,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RunQueryFilter.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("operand", this.operand == null ? null : this.operand.toString()); + jsonWriter.writeStringField("operator", this.operator == null ? null : this.operator.toString()); + jsonWriter.writeArrayField("values", this.values, (writer, element) -> writer.writeString(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RunQueryFilter from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RunQueryFilter if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RunQueryFilter. + */ + public static RunQueryFilter fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RunQueryFilter deserializedRunQueryFilter = new RunQueryFilter(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("operand".equals(fieldName)) { + deserializedRunQueryFilter.operand = RunQueryFilterOperand.fromString(reader.getString()); + } else if ("operator".equals(fieldName)) { + deserializedRunQueryFilter.operator = RunQueryFilterOperator.fromString(reader.getString()); + } else if ("values".equals(fieldName)) { + List values = reader.readArray(reader1 -> reader1.getString()); + deserializedRunQueryFilter.values = values; + } else { + reader.skipChildren(); + } + } + + return deserializedRunQueryFilter; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperand.java index 5b1e8dbb384f..f22ff614cf86 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperand.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -89,7 +88,6 @@ public RunQueryFilterOperand() { * @param name a name to look for. * @return the corresponding RunQueryFilterOperand. 
*/ - @JsonCreator public static RunQueryFilterOperand fromString(String name) { return fromString(name, RunQueryFilterOperand.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperator.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperator.java index cac9f9228ead..ee05f1324fe8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperator.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryFilterOperator.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public RunQueryFilterOperator() { * @param name a name to look for. * @return the corresponding RunQueryFilterOperator. 
*/ - @JsonCreator public static RunQueryFilterOperator fromString(String name) { return fromString(name, RunQueryFilterOperator.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrder.java index a3262c16a165..44594441a260 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrder.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrder.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public RunQueryOrder() { * @param name a name to look for. * @return the corresponding RunQueryOrder. 
*/ - @JsonCreator public static RunQueryOrder fromString(String name) { return fromString(name, RunQueryOrder.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java index 0c30f194e1df..b3ba5a3ba917 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderBy.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * An object to provide order by options for listing runs. */ @Fluent -public final class RunQueryOrderBy { +public final class RunQueryOrderBy implements JsonSerializable { /* * Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, * RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for * trigger runs are TriggerName, TriggerRunTimestamp and Status. */ - @JsonProperty(value = "orderBy", required = true) private RunQueryOrderByField orderBy; /* * Sorting order of the parameter. 
*/ - @JsonProperty(value = "order", required = true) private RunQueryOrder order; /** @@ -94,4 +96,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(RunQueryOrderBy.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("orderBy", this.orderBy == null ? null : this.orderBy.toString()); + jsonWriter.writeStringField("order", this.order == null ? null : this.order.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of RunQueryOrderBy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of RunQueryOrderBy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the RunQueryOrderBy. 
+ */ + public static RunQueryOrderBy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + RunQueryOrderBy deserializedRunQueryOrderBy = new RunQueryOrderBy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("orderBy".equals(fieldName)) { + deserializedRunQueryOrderBy.orderBy = RunQueryOrderByField.fromString(reader.getString()); + } else if ("order".equals(fieldName)) { + deserializedRunQueryOrderBy.order = RunQueryOrder.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedRunQueryOrderBy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderByField.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderByField.java index dfe4f97dac4c..81967c572cc1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderByField.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/RunQueryOrderByField.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -74,7 +73,6 @@ public RunQueryOrderByField() { * @param name a name to look for. * @return the corresponding RunQueryOrderByField. 
*/ - @JsonCreator public static RunQueryOrderByField fromString(String name) { return fromString(name, RunQueryOrderByField.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java index 9248daab2563..329042f9f13a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Salesforce. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceLinkedService.class, - visible = true) -@JsonTypeName("Salesforce") @Fluent public final class SalesforceLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Salesforce"; /* * Salesforce linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SalesforceLinkedServiceTypeProperties innerTypeProperties = new SalesforceLinkedServiceTypeProperties(); /** @@ -265,4 +257,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SalesforceLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceLinkedService. 
+ */ + public static SalesforceLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceLinkedService deserializedSalesforceLinkedService = new SalesforceLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSalesforceLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceLinkedService.innerTypeProperties + = SalesforceLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSalesforceLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java index c01e29f3f18f..705c3bb4ec60 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceMarketingCloudLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Salesforce Marketing Cloud linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceMarketingCloudLinkedService.class, - visible = true) -@JsonTypeName("SalesforceMarketingCloud") @Fluent public final class SalesforceMarketingCloudLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceMarketingCloud"; /* * Salesforce Marketing Cloud linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SalesforceMarketingCloudLinkedServiceTypeProperties innerTypeProperties = new SalesforceMarketingCloudLinkedServiceTypeProperties(); @@ -295,4 +287,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SalesforceMarketingCloudLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceMarketingCloudLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceMarketingCloudLinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceMarketingCloudLinkedService. 
+ */ + public static SalesforceMarketingCloudLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceMarketingCloudLinkedService deserializedSalesforceMarketingCloudLinkedService + = new SalesforceMarketingCloudLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceMarketingCloudLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceMarketingCloudLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedService.innerTypeProperties + = SalesforceMarketingCloudLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSalesforceMarketingCloudLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceMarketingCloudLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceMarketingCloudLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java index a72e0d8947ed..4a517d9e12ac 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Salesforce Marketing Cloud dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceMarketingCloudObjectDataset.class, - visible = true) -@JsonTypeName("SalesforceMarketingCloudObject") @Fluent public final class SalesforceMarketingCloudObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceMarketingCloudObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceMarketingCloudObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceMarketingCloudObjectDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceMarketingCloudObjectDataset. 
+ */ + public static SalesforceMarketingCloudObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceMarketingCloudObjectDataset deserializedSalesforceMarketingCloudObjectDataset + = new SalesforceMarketingCloudObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceMarketingCloudObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceMarketingCloudObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceMarketingCloudObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceMarketingCloudObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceMarketingCloudObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java index 40a9fa28a17a..203d88d5e4f0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceMarketingCloudSource.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce Marketing Cloud source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceMarketingCloudSource.class, - visible = true) -@JsonTypeName("SalesforceMarketingCloudSource") @Fluent public final class SalesforceMarketingCloudSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceMarketingCloudSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -135,4 +128,73 @@ public SalesforceMarketingCloudSource withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceMarketingCloudSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceMarketingCloudSource if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceMarketingCloudSource. 
+ */ + public static SalesforceMarketingCloudSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceMarketingCloudSource deserializedSalesforceMarketingCloudSource + = new SalesforceMarketingCloudSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSalesforceMarketingCloudSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceMarketingCloudSource.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceMarketingCloudSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java index 08816741e6e2..8498d3622877 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceObjectDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Salesforce object dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceObjectDataset.class, - visible = true) -@JsonTypeName("SalesforceObject") @Fluent public final class SalesforceObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceObject"; /* * Salesforce object dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SalesforceObjectDatasetTypeProperties innerTypeProperties; /** @@ -162,4 +154,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceObjectDataset. 
+ */ + public static SalesforceObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceObjectDataset deserializedSalesforceObjectDataset = new SalesforceObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSalesforceObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSalesforceObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSalesforceObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSalesforceObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSalesforceObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceObjectDataset.innerTypeProperties + = SalesforceObjectDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceObjectDataset; 
+ }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java index 34aeb31e64e4..6cdee1507ceb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Salesforce Service Cloud. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudLinkedService.class, - visible = true) -@JsonTypeName("SalesforceServiceCloud") @Fluent public final class SalesforceServiceCloudLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloud"; /* * Salesforce Service Cloud linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SalesforceServiceCloudLinkedServiceTypeProperties innerTypeProperties = new SalesforceServiceCloudLinkedServiceTypeProperties(); @@ -293,4 +285,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SalesforceServiceCloudLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudLinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudLinkedService. 
+ */ + public static SalesforceServiceCloudLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudLinkedService deserializedSalesforceServiceCloudLinkedService + = new SalesforceServiceCloudLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceServiceCloudLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceServiceCloudLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedService.innerTypeProperties + = SalesforceServiceCloudLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java index a436fd3bc891..bd64b128fa85 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudObjectDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Salesforce Service Cloud object dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudObjectDataset.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudObject") @Fluent public final class SalesforceServiceCloudObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudObject"; /* * Salesforce Service Cloud object dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SalesforceServiceCloudObjectDatasetTypeProperties innerTypeProperties; /** @@ -162,4 +154,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudObjectDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudObjectDataset. 
+ */ + public static SalesforceServiceCloudObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudObjectDataset deserializedSalesforceServiceCloudObjectDataset + = new SalesforceServiceCloudObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceServiceCloudObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceServiceCloudObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceServiceCloudObjectDataset.innerTypeProperties + = SalesforceServiceCloudObjectDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java index dc5144470317..6735c490781f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSink.java @@ -5,40 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce Service Cloud sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudSink.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudSink") @Fluent public final class SalesforceServiceCloudSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudSink"; /* * The write behavior for the operation. Default is Insert. 
*/ - @JsonProperty(value = "writeBehavior") private SalesforceSinkWriteBehavior writeBehavior; /* * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* @@ -48,7 +40,6 @@ public final class SalesforceServiceCloudSink extends CopySink { * ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL * value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /** @@ -202,4 +193,79 @@ public SalesforceServiceCloudSink withDisableMetricsCollection(Object disableMet public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? 
null : this.writeBehavior.toString()); + jsonWriter.writeUntypedField("externalIdFieldName", this.externalIdFieldName); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudSink if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudSink. + */ + public static SalesforceServiceCloudSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudSink deserializedSalesforceServiceCloudSink = new SalesforceServiceCloudSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + 
deserializedSalesforceServiceCloudSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.writeBehavior + = SalesforceSinkWriteBehavior.fromString(reader.getString()); + } else if ("externalIdFieldName".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.externalIdFieldName = reader.readUntyped(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedSalesforceServiceCloudSink.ignoreNullValues = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudSink.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java index 95cc3dc1bdfe..4b8d4f324334 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudSource.java @@ -5,47 +5,38 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce Service Cloud source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudSource.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudSource") @Fluent public final class SalesforceServiceCloudSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudSource"; /* * Database query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "readBehavior") private Object readBehavior; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -173,4 +164,72 @@ public SalesforceServiceCloudSource withDisableMetricsCollection(Object disableM public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("readBehavior", this.readBehavior); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudSource if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudSource. 
+ */ + public static SalesforceServiceCloudSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudSource deserializedSalesforceServiceCloudSource = new SalesforceServiceCloudSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.query = reader.readUntyped(); + } else if ("readBehavior".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.readBehavior = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSalesforceServiceCloudSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudSource.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java index 8f7d0420eb3c..e00fe9be9a2b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Salesforce Service Cloud V2. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudV2LinkedService.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudV2") @Fluent public final class SalesforceServiceCloudV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudV2"; /* * Salesforce Service Cloud V2 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SalesforceServiceCloudV2LinkedServiceTypeProperties innerTypeProperties = new SalesforceServiceCloudV2LinkedServiceTypeProperties(); @@ -268,4 +260,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SalesforceServiceCloudV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudV2LinkedService if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2LinkedService. 
+ */ + public static SalesforceServiceCloudV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2LinkedService deserializedSalesforceServiceCloudV2LinkedService + = new SalesforceServiceCloudV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceServiceCloudV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceServiceCloudV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedService.innerTypeProperties + = SalesforceServiceCloudV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java index 8f897fa3e041..3ee518495469 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2ObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2ObjectDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Salesforce Service Cloud V2 object dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudV2ObjectDataset.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudV2Object") @Fluent public final class SalesforceServiceCloudV2ObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudV2Object"; /* * Salesforce Service Cloud V2 object dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SalesforceServiceCloudV2ObjectDatasetTypeProperties innerTypeProperties; /** @@ -187,4 +179,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2ObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudV2ObjectDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2ObjectDataset. 
+ */ + public static SalesforceServiceCloudV2ObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2ObjectDataset deserializedSalesforceServiceCloudV2ObjectDataset + = new SalesforceServiceCloudV2ObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceServiceCloudV2ObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceServiceCloudV2ObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceServiceCloudV2ObjectDataset.innerTypeProperties + = SalesforceServiceCloudV2ObjectDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudV2ObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudV2ObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java index 49945ea75c2d..9d860c4299ba 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Sink.java @@ -5,40 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce Service Cloud V2 sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudV2Sink.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudV2Sink") @Fluent public final class SalesforceServiceCloudV2Sink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudV2Sink"; /* * The write behavior for the operation. Default is Insert. 
*/ - @JsonProperty(value = "writeBehavior") private SalesforceV2SinkWriteBehavior writeBehavior; /* * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* @@ -48,7 +40,6 @@ public final class SalesforceServiceCloudV2Sink extends CopySink { * ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL * value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /** @@ -202,4 +193,79 @@ public SalesforceServiceCloudV2Sink withDisableMetricsCollection(Object disableM public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? 
null : this.writeBehavior.toString()); + jsonWriter.writeUntypedField("externalIdFieldName", this.externalIdFieldName); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2Sink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudV2Sink if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2Sink. + */ + public static SalesforceServiceCloudV2Sink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2Sink deserializedSalesforceServiceCloudV2Sink = new SalesforceServiceCloudV2Sink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withMaxConcurrentConnections(reader.readUntyped()); + } else if 
("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.writeBehavior + = SalesforceV2SinkWriteBehavior.fromString(reader.getString()); + } else if ("externalIdFieldName".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.externalIdFieldName = reader.readUntyped(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Sink.ignoreNullValues = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudV2Sink.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudV2Sink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java index c0fd80fe5a5b..9fb029a52be1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceServiceCloudV2Source.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce Service Cloud V2 source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceServiceCloudV2Source.class, - visible = true) -@JsonTypeName("SalesforceServiceCloudV2Source") @Fluent public final class SalesforceServiceCloudV2Source extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceServiceCloudV2Source"; /* * Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "SOQLQuery") private Object soqlQuery; /* @@ -41,21 +34,18 @@ public final class SalesforceServiceCloudV2Source extends CopySource { * If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset * will be retrieved. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "includeDeletedObjects") private Object includeDeletedObjects; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -213,4 +203,76 @@ public SalesforceServiceCloudV2Source withDisableMetricsCollection(Object disabl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("SOQLQuery", this.soqlQuery); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("includeDeletedObjects", this.includeDeletedObjects); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceServiceCloudV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceServiceCloudV2Source if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceServiceCloudV2Source. 
+ */ + public static SalesforceServiceCloudV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceServiceCloudV2Source deserializedSalesforceServiceCloudV2Source + = new SalesforceServiceCloudV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.type = reader.getString(); + } else if ("SOQLQuery".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.soqlQuery = reader.readUntyped(); + } else if ("query".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.query = reader.readUntyped(); + } else if ("includeDeletedObjects".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.includeDeletedObjects = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSalesforceServiceCloudV2Source.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceServiceCloudV2Source.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceServiceCloudV2Source; + }); + } } diff 
--git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java index aa21bc62dca8..348cdbf3ccc9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSink.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceSink.class, visible = true) -@JsonTypeName("SalesforceSink") @Fluent public final class SalesforceSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceSink"; /* * The write behavior for the operation. Default is Insert. */ - @JsonProperty(value = "writeBehavior") private SalesforceSinkWriteBehavior writeBehavior; /* * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* @@ -44,7 +40,6 @@ public final class SalesforceSink extends CopySink { * ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL * value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /** @@ -198,4 +193,79 @@ public SalesforceSink withDisableMetricsCollection(Object disableMetricsCollecti public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + jsonWriter.writeUntypedField("externalIdFieldName", this.externalIdFieldName); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SalesforceSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceSink. + */ + public static SalesforceSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceSink deserializedSalesforceSink = new SalesforceSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSalesforceSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSalesforceSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSalesforceSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSalesforceSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSalesforceSink.writeBehavior + = SalesforceSinkWriteBehavior.fromString(reader.getString()); + } else if ("externalIdFieldName".equals(fieldName)) { + deserializedSalesforceSink.externalIdFieldName = reader.readUntyped(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedSalesforceSink.ignoreNullValues = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedSalesforceSink.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSinkWriteBehavior.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSinkWriteBehavior.java index c5b6171cd2f6..38c2d25ce780 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSinkWriteBehavior.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSinkWriteBehavior.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public SalesforceSinkWriteBehavior() { * @param name a name to look for. * @return the corresponding SalesforceSinkWriteBehavior. 
*/ - @JsonCreator public static SalesforceSinkWriteBehavior fromString(String name) { return fromString(name, SalesforceSinkWriteBehavior.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java index 61d219c8705c..7055c16d59fa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceSource.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceSource.class, visible = true) -@JsonTypeName("SalesforceSource") @Fluent public final class SalesforceSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceSource"; /* * Database query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or * Expression with resultType string). 
*/ - @JsonProperty(value = "readBehavior") private Object readBehavior; /** @@ -158,4 +154,75 @@ public SalesforceSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("readBehavior", this.readBehavior); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceSource. 
+ */ + public static SalesforceSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceSource deserializedSalesforceSource = new SalesforceSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSalesforceSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSalesforceSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSalesforceSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSalesforceSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSalesforceSource.query = reader.readUntyped(); + } else if ("readBehavior".equals(fieldName)) { + deserializedSalesforceSource.readBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceSource.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java index 20a916818df0..6ca97403fd52 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Salesforce V2. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceV2LinkedService.class, - visible = true) -@JsonTypeName("SalesforceV2") @Fluent public final class SalesforceV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceV2"; /* * Salesforce V2 linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SalesforceV2LinkedServiceTypeProperties innerTypeProperties = new SalesforceV2LinkedServiceTypeProperties(); /** @@ -267,4 +259,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SalesforceV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceV2LinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceV2LinkedService. 
+ */ + public static SalesforceV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2LinkedService deserializedSalesforceV2LinkedService = new SalesforceV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSalesforceV2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceV2LinkedService.innerTypeProperties + = SalesforceV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSalesforceV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java index bf8d79e81f36..52ee78474c82 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2ObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2ObjectDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Salesforce V2 object dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SalesforceV2ObjectDataset.class, - visible = true) -@JsonTypeName("SalesforceV2Object") @Fluent public final class SalesforceV2ObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceV2Object"; /* * Salesforce V2 object dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SalesforceV2ObjectDatasetTypeProperties innerTypeProperties; /** @@ -185,4 +177,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2ObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceV2ObjectDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SalesforceV2ObjectDataset. 
+ */ + public static SalesforceV2ObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2ObjectDataset deserializedSalesforceV2ObjectDataset = new SalesforceV2ObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSalesforceV2ObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSalesforceV2ObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSalesforceV2ObjectDataset.innerTypeProperties + = SalesforceV2ObjectDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceV2ObjectDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedSalesforceV2ObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java index 1905fb50798b..a9f3ef4a1a68 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Sink.java @@ -5,36 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce V2 sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceV2Sink.class, visible = true) -@JsonTypeName("SalesforceV2Sink") @Fluent public final class SalesforceV2Sink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceV2Sink"; /* * The write behavior for the operation. Default is Insert. */ - @JsonProperty(value = "writeBehavior") private SalesforceV2SinkWriteBehavior writeBehavior; /* * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "externalIdFieldName") private Object externalIdFieldName; /* @@ -44,7 +40,6 @@ public final class SalesforceV2Sink extends CopySink { * ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL * value when doing insert operation. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "ignoreNullValues") private Object ignoreNullValues; /** @@ -198,4 +193,79 @@ public SalesforceV2Sink withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + jsonWriter.writeUntypedField("externalIdFieldName", this.externalIdFieldName); + jsonWriter.writeUntypedField("ignoreNullValues", this.ignoreNullValues); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2Sink from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SalesforceV2Sink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceV2Sink. + */ + public static SalesforceV2Sink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2Sink deserializedSalesforceV2Sink = new SalesforceV2Sink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSalesforceV2Sink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSalesforceV2Sink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSalesforceV2Sink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSalesforceV2Sink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceV2Sink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceV2Sink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceV2Sink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSalesforceV2Sink.writeBehavior + = SalesforceV2SinkWriteBehavior.fromString(reader.getString()); + } else if ("externalIdFieldName".equals(fieldName)) { + deserializedSalesforceV2Sink.externalIdFieldName = reader.readUntyped(); + } else if ("ignoreNullValues".equals(fieldName)) { + deserializedSalesforceV2Sink.ignoreNullValues = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceV2Sink.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceV2Sink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2SinkWriteBehavior.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2SinkWriteBehavior.java index 8398bbe0d3d9..2e094b1f128d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2SinkWriteBehavior.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2SinkWriteBehavior.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public SalesforceV2SinkWriteBehavior() { * @param name a name to look for. * @return the corresponding SalesforceV2SinkWriteBehavior. 
*/ - @JsonCreator public static SalesforceV2SinkWriteBehavior fromString(String name) { return fromString(name, SalesforceV2SinkWriteBehavior.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java index cb8ffa26ac9b..c848a911d414 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SalesforceV2Source.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Salesforce V2 source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SalesforceV2Source.class, visible = true) -@JsonTypeName("SalesforceV2Source") @Fluent public final class SalesforceV2Source extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SalesforceV2Source"; /* * Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "SOQLQuery") private Object soqlQuery; /* @@ -37,14 +34,12 @@ public final class SalesforceV2Source extends TabularSource { * If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset * will be retrieved. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * This property control whether query result contains Deleted objects. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "includeDeletedObjects") private Object includeDeletedObjects; /** @@ -198,4 +193,78 @@ public SalesforceV2Source withDisableMetricsCollection(Object disableMetricsColl public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("SOQLQuery", this.soqlQuery); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("includeDeletedObjects", this.includeDeletedObjects); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SalesforceV2Source from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of SalesforceV2Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SalesforceV2Source. + */ + public static SalesforceV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SalesforceV2Source deserializedSalesforceV2Source = new SalesforceV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSalesforceV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSalesforceV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSalesforceV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSalesforceV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSalesforceV2Source.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSalesforceV2Source.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSalesforceV2Source.type = reader.getString(); + } else if ("SOQLQuery".equals(fieldName)) { + deserializedSalesforceV2Source.soqlQuery = reader.readUntyped(); + } else if ("query".equals(fieldName)) { + deserializedSalesforceV2Source.query = reader.readUntyped(); + } else if ("includeDeletedObjects".equals(fieldName)) { + deserializedSalesforceV2Source.includeDeletedObjects = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new 
LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSalesforceV2Source.withAdditionalProperties(additionalProperties); + + return deserializedSalesforceV2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java index 7b0ae97fcfc3..c22e9d36adb7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBWLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapBWLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP Business Warehouse Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBWLinkedService.class, visible = true) -@JsonTypeName("SapBW") @Fluent public final class SapBWLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapBW"; /* * Properties specific to this linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapBWLinkedServiceTypeProperties innerTypeProperties = new SapBWLinkedServiceTypeProperties(); /** @@ -259,4 +255,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapBWLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapBWLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapBWLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapBWLinkedService. 
+ */ + public static SapBWLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapBWLinkedService deserializedSapBWLinkedService = new SapBWLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapBWLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapBWLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapBWLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapBWLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapBWLinkedService.innerTypeProperties + = SapBWLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapBWLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapBWLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapBWLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java index 8bf41a31502b..2a9e629b9fe8 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwCubeDataset.java @@ -5,25 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The SAP BW cube dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBwCubeDataset.class, visible = true) -@JsonTypeName("SapBwCube") @Fluent public final class SapBwCubeDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapBwCube"; /** @@ -114,4 +111,76 @@ public SapBwCubeDataset withFolder(DatasetFolder folder) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapBwCubeDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapBwCubeDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapBwCubeDataset. 
+ */ + public static SapBwCubeDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapBwCubeDataset deserializedSapBwCubeDataset = new SapBwCubeDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapBwCubeDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapBwCubeDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapBwCubeDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapBwCubeDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapBwCubeDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapBwCubeDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapBwCubeDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSapBwCubeDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapBwCubeDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapBwCubeDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java index beb381b660cc..7cb05095627d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapBwSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SapBW server via MDX. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapBwSource.class, visible = true) -@JsonTypeName("SapBwSource") @Fluent public final class SapBwSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapBwSource"; /* * MDX query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public SapBwSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapBwSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapBwSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SapBwSource. 
+ */ + public static SapBwSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapBwSource deserializedSapBwSource = new SapBwSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapBwSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapBwSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapBwSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapBwSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapBwSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapBwSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapBwSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSapBwSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapBwSource.withAdditionalProperties(additionalProperties); + + return deserializedSapBwSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java index 514ed0df3813..b211b0ca478d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for SAP Cloud for Customer. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapCloudForCustomerLinkedService.class, - visible = true) -@JsonTypeName("SapCloudForCustomer") @Fluent public final class SapCloudForCustomerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapCloudForCustomer"; /* * SAP Cloud for Customer linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapCloudForCustomerLinkedServiceTypeProperties innerTypeProperties = new SapCloudForCustomerLinkedServiceTypeProperties(); @@ -218,4 +210,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapCloudForCustomerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapCloudForCustomerLinkedService. 
+ */ + public static SapCloudForCustomerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerLinkedService deserializedSapCloudForCustomerLinkedService + = new SapCloudForCustomerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapCloudForCustomerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapCloudForCustomerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedService.innerTypeProperties + = SapCloudForCustomerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapCloudForCustomerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapCloudForCustomerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapCloudForCustomerLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java index 42e86094851a..765f8df37c1a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerResourceDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The path of the SAP Cloud for Customer OData entity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapCloudForCustomerResourceDataset.class, - visible = true) -@JsonTypeName("SapCloudForCustomerResource") @Fluent public final class SapCloudForCustomerResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapCloudForCustomerResource"; /* * SAP Cloud For Customer OData resource dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapCloudForCustomerResourceDatasetTypeProperties innerTypeProperties = new SapCloudForCustomerResourceDatasetTypeProperties(); @@ -170,4 +162,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapCloudForCustomerResourceDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerResourceDataset if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapCloudForCustomerResourceDataset. 
+ */ + public static SapCloudForCustomerResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerResourceDataset deserializedSapCloudForCustomerResourceDataset + = new SapCloudForCustomerResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapCloudForCustomerResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapCloudForCustomerResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.innerTypeProperties + = SapCloudForCustomerResourceDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapCloudForCustomerResourceDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedSapCloudForCustomerResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapCloudForCustomerResourceDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java index 360203d6704c..12d1ed3eeab8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSink.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SAP Cloud for Customer sink. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapCloudForCustomerSink.class, - visible = true) -@JsonTypeName("SapCloudForCustomerSink") @Fluent public final class SapCloudForCustomerSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapCloudForCustomerSink"; /* * The write behavior for the operation. Default is 'Insert'. 
*/ - @JsonProperty(value = "writeBehavior") private SapCloudForCustomerSinkWriteBehavior writeBehavior; /* @@ -39,7 +32,6 @@ public final class SapCloudForCustomerSink extends CopySink { * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -165,4 +157,76 @@ public SapCloudForCustomerSink withDisableMetricsCollection(Object disableMetric public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("writeBehavior", this.writeBehavior == null ? null : this.writeBehavior.toString()); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerSink if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SapCloudForCustomerSink. + */ + public static SapCloudForCustomerSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerSink deserializedSapCloudForCustomerSink = new SapCloudForCustomerSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapCloudForCustomerSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapCloudForCustomerSink.type = reader.getString(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSapCloudForCustomerSink.writeBehavior + = SapCloudForCustomerSinkWriteBehavior.fromString(reader.getString()); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedSapCloudForCustomerSink.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapCloudForCustomerSink.withAdditionalProperties(additionalProperties); + + return 
deserializedSapCloudForCustomerSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSinkWriteBehavior.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSinkWriteBehavior.java index 17a7366137d4..100a446d22e1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSinkWriteBehavior.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSinkWriteBehavior.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -38,7 +37,6 @@ public SapCloudForCustomerSinkWriteBehavior() { * @param name a name to look for. * @return the corresponding SapCloudForCustomerSinkWriteBehavior. 
*/ - @JsonCreator public static SapCloudForCustomerSinkWriteBehavior fromString(String name) { return fromString(name, SapCloudForCustomerSinkWriteBehavior.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java index 75853df95145..4de9e87a3308 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapCloudForCustomerSource.java @@ -5,33 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP Cloud for Customer source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapCloudForCustomerSource.class, - visible = true) -@JsonTypeName("SapCloudForCustomerSource") @Fluent public final class SapCloudForCustomerSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapCloudForCustomerSource"; /* * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /* @@ -39,7 +32,6 @@ public final class SapCloudForCustomerSource extends TabularSource { * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -167,4 +159,75 @@ public SapCloudForCustomerSource withDisableMetricsCollection(Object disableMetr public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapCloudForCustomerSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapCloudForCustomerSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapCloudForCustomerSource. 
+ */ + public static SapCloudForCustomerSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapCloudForCustomerSource deserializedSapCloudForCustomerSource = new SapCloudForCustomerSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapCloudForCustomerSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapCloudForCustomerSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSapCloudForCustomerSource.query = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedSapCloudForCustomerSource.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapCloudForCustomerSource.withAdditionalProperties(additionalProperties); + + return deserializedSapCloudForCustomerSource; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java index c5157041fecc..15b1caf5de9d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapEccLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for SAP ERP Central Component(SAP ECC). */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccLinkedService.class, visible = true) -@JsonTypeName("SapEcc") @Fluent public final class SapEccLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapEcc"; /* * SAP ECC linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapEccLinkedServiceTypeProperties innerTypeProperties = new SapEccLinkedServiceTypeProperties(); /** @@ -213,4 +209,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapEccLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapEccLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapEccLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapEccLinkedService. 
+ */ + public static SapEccLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapEccLinkedService deserializedSapEccLinkedService = new SapEccLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapEccLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapEccLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapEccLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapEccLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapEccLinkedService.innerTypeProperties + = SapEccLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapEccLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapEccLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapEccLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java index ed80256f202b..b6e79a4d86ce 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccResourceDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapEccResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The path of the SAP ECC OData entity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccResourceDataset.class, visible = true) -@JsonTypeName("SapEccResource") @Fluent public final class SapEccResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapEccResource"; /* * SAP ECC OData resource dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapEccResourceDatasetTypeProperties innerTypeProperties = new SapEccResourceDatasetTypeProperties(); /** @@ -163,4 +159,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapEccResourceDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapEccResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapEccResourceDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapEccResourceDataset. 
+ */ + public static SapEccResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapEccResourceDataset deserializedSapEccResourceDataset = new SapEccResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapEccResourceDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapEccResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapEccResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapEccResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapEccResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapEccResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapEccResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapEccResourceDataset.innerTypeProperties + = SapEccResourceDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapEccResourceDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapEccResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapEccResourceDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java index 4ba0eb5a1b44..e07e16825147 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapEccSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP ECC source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapEccSource.class, visible = true) -@JsonTypeName("SapEccSource") @Fluent public final class SapEccSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapEccSource"; /* * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* @@ -35,7 +32,6 @@ public final class SapEccSource extends TabularSource { * response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -163,4 +159,75 @@ public SapEccSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapEccSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapEccSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SapEccSource. 
+ */ + public static SapEccSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapEccSource deserializedSapEccSource = new SapEccSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapEccSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapEccSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapEccSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapEccSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapEccSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapEccSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapEccSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSapEccSource.query = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedSapEccSource.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapEccSource.withAdditionalProperties(additionalProperties); + + return deserializedSapEccSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaAuthenticationType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaAuthenticationType.java index 7e5d5b23a3ed..3a8cb1c18f29 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public SapHanaAuthenticationType() { * @param name a name to look for. * @return the corresponding SapHanaAuthenticationType. */ - @JsonCreator public static SapHanaAuthenticationType fromString(String name) { return fromString(name, SapHanaAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java index 9de0a5ac9467..5a1dc0f29293 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapHanaLinkedServiceProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP HANA Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaLinkedService.class, visible = true) -@JsonTypeName("SapHana") @Fluent public final class SapHanaLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapHana"; /* * Properties specific to this linked service type. */ - @JsonProperty(value = "typeProperties", required = true) private SapHanaLinkedServiceProperties innerTypeProperties = new SapHanaLinkedServiceProperties(); /** @@ -257,4 +253,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapHanaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SapHanaLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapHanaLinkedService. + */ + public static SapHanaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaLinkedService deserializedSapHanaLinkedService = new SapHanaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapHanaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapHanaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapHanaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapHanaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapHanaLinkedService.innerTypeProperties + = SapHanaLinkedServiceProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapHanaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapHanaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapHanaLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java index b773c64a30ba..0780c9e0ad7f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaPartitionSettings.java @@ -5,18 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for SAP HANA source partitioning. */ @Fluent -public final class SapHanaPartitionSettings { +public final class SapHanaPartitionSettings implements JsonSerializable { /* * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /** @@ -54,4 +57,40 @@ public SapHanaPartitionSettings withPartitionColumnName(Object partitionColumnNa */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaPartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SapHanaPartitionSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapHanaPartitionSettings. + */ + public static SapHanaPartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaPartitionSettings deserializedSapHanaPartitionSettings = new SapHanaPartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionColumnName".equals(fieldName)) { + deserializedSapHanaPartitionSettings.partitionColumnName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapHanaPartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java index 74af051bd637..baf02b7f5d29 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaSource.java @@ -5,48 +5,42 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP HANA source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaSource.class, visible = true) -@JsonTypeName("SapHanaSource") @Fluent public final class SapHanaSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapHanaSource"; /* * SAP HANA Sql query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). */ - @JsonProperty(value = "packetSize") private Object packetSize; /* * The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "SapHanaDynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for SAP HANA source partitioning. */ - @JsonProperty(value = "partitionSettings") private SapHanaPartitionSettings partitionSettings; /** @@ -215,4 +209,81 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("packetSize", this.packetSize); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + 
jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapHanaSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SapHanaSource. + */ + public static SapHanaSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaSource deserializedSapHanaSource = new SapHanaSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapHanaSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapHanaSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapHanaSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapHanaSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapHanaSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapHanaSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapHanaSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSapHanaSource.query = reader.readUntyped(); + } else if 
("packetSize".equals(fieldName)) { + deserializedSapHanaSource.packetSize = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSapHanaSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSapHanaSource.partitionSettings = SapHanaPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapHanaSource.withAdditionalProperties(additionalProperties); + + return deserializedSapHanaSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java index 384baf066445..f599d0af1bfc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapHanaTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapHanaTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP HANA Table properties. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapHanaTableDataset.class, visible = true) -@JsonTypeName("SapHanaTable") @Fluent public final class SapHanaTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapHanaTable"; /* * SAP HANA Table properties. */ - @JsonProperty(value = "typeProperties") private SapHanaTableDatasetTypeProperties innerTypeProperties; /** @@ -179,4 +175,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapHanaTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapHanaTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the SapHanaTableDataset. + */ + public static SapHanaTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapHanaTableDataset deserializedSapHanaTableDataset = new SapHanaTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapHanaTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapHanaTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapHanaTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapHanaTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapHanaTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapHanaTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapHanaTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSapHanaTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapHanaTableDataset.innerTypeProperties + = SapHanaTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapHanaTableDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedSapHanaTableDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java index 4c7bd0c10636..9b3949021b16 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapOdpLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP ODP Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpLinkedService.class, visible = true) -@JsonTypeName("SapOdp") @Fluent public final class SapOdpLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOdp"; /* * Properties specific to SAP ODP linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapOdpLinkedServiceTypeProperties innerTypeProperties = new SapOdpLinkedServiceTypeProperties(); /** @@ -559,4 +555,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOdpLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOdpLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOdpLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOdpLinkedService. 
+ */ + public static SapOdpLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOdpLinkedService deserializedSapOdpLinkedService = new SapOdpLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapOdpLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapOdpLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapOdpLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapOdpLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapOdpLinkedService.innerTypeProperties + = SapOdpLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapOdpLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOdpLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapOdpLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java index 4b20173fc4f2..ce4230c0afaa 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpResourceDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapOdpResourceDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP ODP Resource properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpResourceDataset.class, visible = true) -@JsonTypeName("SapOdpResource") @Fluent public final class SapOdpResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOdpResource"; /* * SAP ODP Resource properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapOdpResourceDatasetTypeProperties innerTypeProperties = new SapOdpResourceDatasetTypeProperties(); /** @@ -186,4 +182,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOdpResourceDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOdpResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOdpResourceDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOdpResourceDataset. 
+ */ + public static SapOdpResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOdpResourceDataset deserializedSapOdpResourceDataset = new SapOdpResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapOdpResourceDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapOdpResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapOdpResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapOdpResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapOdpResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapOdpResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapOdpResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapOdpResourceDataset.innerTypeProperties + = SapOdpResourceDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapOdpResourceDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOdpResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapOdpResourceDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java index e92b084730c3..6726f67f7336 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOdpSource.java @@ -5,50 +5,44 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP ODP source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOdpSource.class, visible = true) -@JsonTypeName("SapOdpSource") @Fluent public final class SapOdpSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOdpSource"; /* * The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "extractionMode") private Object extractionMode; /* * The subscriber process to manage the delta process. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "subscriberProcess") private Object subscriberProcess; /* * Specifies the selection conditions from source data. 
Type: array of objects(selection) (or Expression with * resultType array of objects). */ - @JsonProperty(value = "selection") private Object selection; /* * Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with * resultType array of objects). */ - @JsonProperty(value = "projection") private Object projection; /** @@ -218,4 +212,81 @@ public SapOdpSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("extractionMode", this.extractionMode); + jsonWriter.writeUntypedField("subscriberProcess", this.subscriberProcess); + jsonWriter.writeUntypedField("selection", this.selection); + jsonWriter.writeUntypedField("projection", this.projection); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOdpSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOdpSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SapOdpSource. + */ + public static SapOdpSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOdpSource deserializedSapOdpSource = new SapOdpSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapOdpSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapOdpSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapOdpSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapOdpSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapOdpSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapOdpSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapOdpSource.type = reader.getString(); + } else if ("extractionMode".equals(fieldName)) { + deserializedSapOdpSource.extractionMode = reader.readUntyped(); + } else if ("subscriberProcess".equals(fieldName)) { + deserializedSapOdpSource.subscriberProcess = reader.readUntyped(); + } else if ("selection".equals(fieldName)) { + deserializedSapOdpSource.selection = reader.readUntyped(); + } else if ("projection".equals(fieldName)) { + deserializedSapOdpSource.projection = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOdpSource.withAdditionalProperties(additionalProperties); + + return 
deserializedSapOdpSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java index e501f216fbc0..7582666a7cfb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP Business Warehouse Open Hub Destination Linked Service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapOpenHubLinkedService.class, - visible = true) -@JsonTypeName("SapOpenHub") @Fluent public final class SapOpenHubLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOpenHub"; /* * Properties specific to SAP Business Warehouse Open Hub Destination linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapOpenHubLinkedServiceTypeProperties innerTypeProperties = new SapOpenHubLinkedServiceTypeProperties(); /** @@ -393,4 +385,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOpenHubLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOpenHubLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOpenHubLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOpenHubLinkedService. 
+ */ + public static SapOpenHubLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOpenHubLinkedService deserializedSapOpenHubLinkedService = new SapOpenHubLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapOpenHubLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapOpenHubLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapOpenHubLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapOpenHubLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapOpenHubLinkedService.innerTypeProperties + = SapOpenHubLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapOpenHubLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOpenHubLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapOpenHubLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java index f60b517e9376..72743ee7e33d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubSource.java @@ -5,51 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP Business Warehouse Open Hub Destination source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOpenHubSource.class, visible = true) -@JsonTypeName("SapOpenHubSource") @Fluent public final class SapOpenHubSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOpenHubSource"; /* * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "excludeLastRequest") private Object excludeLastRequest; /* * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this * property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */ - @JsonProperty(value = "baseRequestId") private Object baseRequestId; /* * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or * Expression with resultType string). 
*/ - @JsonProperty(value = "customRfcReadTableFunctionModule") private Object customRfcReadTableFunctionModule; /* * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data * retrieved. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sapDataColumnDelimiter") private Object sapDataColumnDelimiter; /** @@ -221,4 +215,81 @@ public SapOpenHubSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("excludeLastRequest", this.excludeLastRequest); + jsonWriter.writeUntypedField("baseRequestId", this.baseRequestId); + jsonWriter.writeUntypedField("customRfcReadTableFunctionModule", this.customRfcReadTableFunctionModule); + jsonWriter.writeUntypedField("sapDataColumnDelimiter", this.sapDataColumnDelimiter); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOpenHubSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SapOpenHubSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SapOpenHubSource. + */ + public static SapOpenHubSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOpenHubSource deserializedSapOpenHubSource = new SapOpenHubSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapOpenHubSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapOpenHubSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapOpenHubSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapOpenHubSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapOpenHubSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapOpenHubSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapOpenHubSource.type = reader.getString(); + } else if ("excludeLastRequest".equals(fieldName)) { + deserializedSapOpenHubSource.excludeLastRequest = reader.readUntyped(); + } else if ("baseRequestId".equals(fieldName)) { + deserializedSapOpenHubSource.baseRequestId = reader.readUntyped(); + } else if ("customRfcReadTableFunctionModule".equals(fieldName)) { + deserializedSapOpenHubSource.customRfcReadTableFunctionModule = reader.readUntyped(); + } else if ("sapDataColumnDelimiter".equals(fieldName)) { + deserializedSapOpenHubSource.sapDataColumnDelimiter = 
reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOpenHubSource.withAdditionalProperties(additionalProperties); + + return deserializedSapOpenHubSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java index 7b6f254359be..310ac25abc54 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapOpenHubTableDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Sap Business Warehouse Open Hub Destination Table properties. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapOpenHubTableDataset.class, visible = true) -@JsonTypeName("SapOpenHubTable") @Fluent public final class SapOpenHubTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapOpenHubTable"; /* * Sap Business Warehouse Open Hub Destination Table properties. */ - @JsonProperty(value = "typeProperties", required = true) private SapOpenHubTableDatasetTypeProperties innerTypeProperties = new SapOpenHubTableDatasetTypeProperties(); /** @@ -217,4 +213,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapOpenHubTableDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapOpenHubTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapOpenHubTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapOpenHubTableDataset. 
+ */ + public static SapOpenHubTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapOpenHubTableDataset deserializedSapOpenHubTableDataset = new SapOpenHubTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapOpenHubTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapOpenHubTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapOpenHubTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapOpenHubTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapOpenHubTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapOpenHubTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSapOpenHubTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapOpenHubTableDataset.innerTypeProperties + = SapOpenHubTableDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapOpenHubTableDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapOpenHubTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapOpenHubTableDataset; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java index 97bcd51695b1..aef353ea86db 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapTableLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP Table Linked Service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapTableLinkedService.class, visible = true) -@JsonTypeName("SapTable") @Fluent public final class SapTableLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapTable"; /* * Properties specific to this linked service type. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SapTableLinkedServiceTypeProperties innerTypeProperties = new SapTableLinkedServiceTypeProperties(); /** @@ -511,4 +507,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapTableLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTableLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTableLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapTableLinkedService. 
+ */ + public static SapTableLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTableLinkedService deserializedSapTableLinkedService = new SapTableLinkedService(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSapTableLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapTableLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapTableLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapTableLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapTableLinkedService.innerTypeProperties + = SapTableLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapTableLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapTableLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSapTableLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java index 21f72568da12..dd6e4e476642 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTablePartitionSettings.java @@ -5,39 +5,39 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for SAP table source partitioning. */ @Fluent -public final class SapTablePartitionSettings { +public final class SapTablePartitionSettings implements JsonSerializable<SapTablePartitionSettings> { /* * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /* * The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType * string). 
*/ - @JsonProperty(value = "maxPartitionsNumber") private Object maxPartitionsNumber; /** @@ -141,4 +141,49 @@ public SapTablePartitionSettings withMaxPartitionsNumber(Object maxPartitionsNum */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + jsonWriter.writeUntypedField("maxPartitionsNumber", this.maxPartitionsNumber); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTablePartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTablePartitionSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SapTablePartitionSettings. 
+ */ + public static SapTablePartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTablePartitionSettings deserializedSapTablePartitionSettings = new SapTablePartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionColumnName".equals(fieldName)) { + deserializedSapTablePartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedSapTablePartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedSapTablePartitionSettings.partitionLowerBound = reader.readUntyped(); + } else if ("maxPartitionsNumber".equals(fieldName)) { + deserializedSapTablePartitionSettings.maxPartitionsNumber = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSapTablePartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java index c6dfc089f81c..0f732b6d7cdf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableResourceDataset.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SapTableResourceDatasetTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SAP Table Resource properties. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SapTableResourceDataset.class, - visible = true) -@JsonTypeName("SapTableResource") @Fluent public final class SapTableResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapTableResource"; /* * SAP Table Resource properties. */ - @JsonProperty(value = "typeProperties", required = true) private SapTableResourceDatasetTypeProperties innerTypeProperties = new SapTableResourceDatasetTypeProperties(); /** @@ -167,4 +159,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SapTableResourceDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTableResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTableResourceDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SapTableResourceDataset. + */ + public static SapTableResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTableResourceDataset deserializedSapTableResourceDataset = new SapTableResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSapTableResourceDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSapTableResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSapTableResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSapTableResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSapTableResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSapTableResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + 
deserializedSapTableResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSapTableResourceDataset.innerTypeProperties + = SapTableResourceDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSapTableResourceDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapTableResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedSapTableResourceDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java index 8d786840edd7..06a2ac6bacdc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SapTableSource.java @@ -5,70 +5,61 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for SAP Table source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SapTableSource.class, visible = true) -@JsonTypeName("SapTableSource") @Fluent public final class SapTableSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SapTableSource"; /* * The number of rows to be retrieved. Type: integer(or Expression with resultType integer). */ - @JsonProperty(value = "rowCount") private Object rowCount; /* * The number of rows that will be skipped. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "rowSkips") private Object rowSkips; /* * The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "rfcTableFields") private Object rfcTableFields; /* * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "rfcTableOptions") private Object rfcTableOptions; /* * Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: * integer (or Expression with resultType integer). */ - @JsonProperty(value = "batchSize") private Object batchSize; /* * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "customRfcReadTableFunctionModule") private Object customRfcReadTableFunctionModule; /* * The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data * retrieved. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "sapDataColumnDelimiter") private Object sapDataColumnDelimiter; /* @@ -76,13 +67,11 @@ public final class SapTableSource extends TabularSource { * "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", * "PartitionOnTime". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for SAP table source partitioning. */ - @JsonProperty(value = "partitionSettings") private SapTablePartitionSettings partitionSettings; /** @@ -365,4 +354,96 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("rowCount", this.rowCount); + jsonWriter.writeUntypedField("rowSkips", this.rowSkips); + jsonWriter.writeUntypedField("rfcTableFields", this.rfcTableFields); + jsonWriter.writeUntypedField("rfcTableOptions", this.rfcTableOptions); + jsonWriter.writeUntypedField("batchSize", this.batchSize); + jsonWriter.writeUntypedField("customRfcReadTableFunctionModule", this.customRfcReadTableFunctionModule); + jsonWriter.writeUntypedField("sapDataColumnDelimiter", this.sapDataColumnDelimiter); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for 
(Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SapTableSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SapTableSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SapTableSource. + */ + public static SapTableSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SapTableSource deserializedSapTableSource = new SapTableSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSapTableSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSapTableSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSapTableSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSapTableSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSapTableSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSapTableSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSapTableSource.type = reader.getString(); + } else if ("rowCount".equals(fieldName)) { + deserializedSapTableSource.rowCount = reader.readUntyped(); + } else if ("rowSkips".equals(fieldName)) { + deserializedSapTableSource.rowSkips = reader.readUntyped(); + } 
else if ("rfcTableFields".equals(fieldName)) { + deserializedSapTableSource.rfcTableFields = reader.readUntyped(); + } else if ("rfcTableOptions".equals(fieldName)) { + deserializedSapTableSource.rfcTableOptions = reader.readUntyped(); + } else if ("batchSize".equals(fieldName)) { + deserializedSapTableSource.batchSize = reader.readUntyped(); + } else if ("customRfcReadTableFunctionModule".equals(fieldName)) { + deserializedSapTableSource.customRfcReadTableFunctionModule = reader.readUntyped(); + } else if ("sapDataColumnDelimiter".equals(fieldName)) { + deserializedSapTableSource.sapDataColumnDelimiter = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSapTableSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSapTableSource.partitionSettings = SapTablePartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSapTableSource.withAdditionalProperties(additionalProperties); + + return deserializedSapTableSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java index ab79d1a47eac..2868eb9fe049 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTrigger.java @@ -6,33 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ScheduleTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that creates pipeline runs periodically, on schedule. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ScheduleTrigger.class, visible = true) -@JsonTypeName("ScheduleTrigger") @Fluent public final class ScheduleTrigger extends MultiplePipelineTrigger { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ScheduleTrigger"; /* * Schedule Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private ScheduleTriggerTypeProperties innerTypeProperties = new ScheduleTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of ScheduleTrigger class. */ @@ -58,6 +60,17 @@ private ScheduleTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -126,4 +139,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScheduleTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeArrayField("pipelines", pipelines(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScheduleTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScheduleTrigger if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScheduleTrigger. 
+ */ + public static ScheduleTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScheduleTrigger deserializedScheduleTrigger = new ScheduleTrigger(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedScheduleTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedScheduleTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedScheduleTrigger.withAnnotations(annotations); + } else if ("pipelines".equals(fieldName)) { + List<TriggerPipelineReference> pipelines + = reader.readArray(reader1 -> TriggerPipelineReference.fromJson(reader1)); + deserializedScheduleTrigger.withPipelines(pipelines); + } else if ("typeProperties".equals(fieldName)) { + deserializedScheduleTrigger.innerTypeProperties = ScheduleTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedScheduleTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedScheduleTrigger.withAdditionalProperties(additionalProperties); + + return deserializedScheduleTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTriggerRecurrence.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTriggerRecurrence.java index 3be0e6801d49..af2b52cceb43 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTriggerRecurrence.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScheduleTriggerRecurrence.java @@ -5,11 +5,14 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; import java.util.LinkedHashMap; import java.util.Map; @@ -17,47 +20,40 @@ * The workflow trigger recurrence. */ @Fluent -public final class ScheduleTriggerRecurrence { +public final class ScheduleTriggerRecurrence implements JsonSerializable { /* * The frequency. */ - @JsonProperty(value = "frequency") private RecurrenceFrequency frequency; /* * The interval. */ - @JsonProperty(value = "interval") private Integer interval; /* * The start time. */ - @JsonProperty(value = "startTime") private OffsetDateTime startTime; /* * The end time. */ - @JsonProperty(value = "endTime") private OffsetDateTime endTime; /* * The time zone. */ - @JsonProperty(value = "timeZone") private String timeZone; /* * The recurrence schedule. */ - @JsonProperty(value = "schedule") private RecurrenceSchedule schedule; /* * The workflow trigger recurrence. */ - @JsonIgnore private Map additionalProperties; /** @@ -191,7 +187,6 @@ public ScheduleTriggerRecurrence withSchedule(RecurrenceSchedule schedule) { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -207,14 +202,6 @@ public ScheduleTriggerRecurrence withAdditionalProperties(Map ad return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -225,4 +212,71 @@ public void validate() { schedule().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("frequency", this.frequency == null ? null : this.frequency.toString()); + jsonWriter.writeNumberField("interval", this.interval); + jsonWriter.writeStringField("startTime", + this.startTime == null ? null : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.startTime)); + jsonWriter.writeStringField("endTime", + this.endTime == null ? null : DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(this.endTime)); + jsonWriter.writeStringField("timeZone", this.timeZone); + jsonWriter.writeJsonField("schedule", this.schedule); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScheduleTriggerRecurrence from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScheduleTriggerRecurrence if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ScheduleTriggerRecurrence. 
+ */ + public static ScheduleTriggerRecurrence fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScheduleTriggerRecurrence deserializedScheduleTriggerRecurrence = new ScheduleTriggerRecurrence(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("frequency".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.frequency + = RecurrenceFrequency.fromString(reader.getString()); + } else if ("interval".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.interval = reader.getNullable(JsonReader::getInt); + } else if ("startTime".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.startTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("endTime".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.endTime = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("timeZone".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.timeZone = reader.getString(); + } else if ("schedule".equals(fieldName)) { + deserializedScheduleTriggerRecurrence.schedule = RecurrenceSchedule.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedScheduleTriggerRecurrence.additionalProperties = additionalProperties; + + return deserializedScheduleTriggerRecurrence; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java index 29f7089230da..d99ac877e5a4 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptAction.java @@ -6,35 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Custom script action to run on HDI ondemand cluster once it's up. */ @Fluent -public final class ScriptAction { +public final class ScriptAction implements JsonSerializable { /* * The user provided name of the script action. */ - @JsonProperty(value = "name", required = true) private String name; /* * The URI for the script action. */ - @JsonProperty(value = "uri", required = true) private String uri; /* * The node types on which the script action should be executed. */ - @JsonProperty(value = "roles", required = true) private Object roles; /* * The parameters for the script action. */ - @JsonProperty(value = "parameters") private String parameters; /** @@ -144,4 +144,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScriptAction.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("uri", this.uri); + jsonWriter.writeUntypedField("roles", this.roles); + jsonWriter.writeStringField("parameters", this.parameters); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptAction from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ScriptAction if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScriptAction. + */ + public static ScriptAction fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptAction deserializedScriptAction = new ScriptAction(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedScriptAction.name = reader.getString(); + } else if ("uri".equals(fieldName)) { + deserializedScriptAction.uri = reader.getString(); + } else if ("roles".equals(fieldName)) { + deserializedScriptAction.roles = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + deserializedScriptAction.parameters = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedScriptAction; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java index 47b37309f029..9bfbcb9c433c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ScriptActivityTypeProperties; 
-import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Script activity type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ScriptActivity.class, visible = true) -@JsonTypeName("Script") @Fluent public final class ScriptActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Script"; /* * Script activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ScriptActivityTypeProperties innerTypeProperties = new ScriptActivityTypeProperties(); /** @@ -219,4 +216,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScriptActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScriptActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScriptActivity. 
+ */ + public static ScriptActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptActivity deserializedScriptActivity = new ScriptActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedScriptActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedScriptActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedScriptActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedScriptActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedScriptActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedScriptActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedScriptActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedScriptActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedScriptActivity.innerTypeProperties = ScriptActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedScriptActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedScriptActivity.withAdditionalProperties(additionalProperties); + + return deserializedScriptActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityLogDestination.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityLogDestination.java index ec5f8f263657..3ca47fda77df 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityLogDestination.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityLogDestination.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public ScriptActivityLogDestination() { * @param name a name to look for. * @return the corresponding ScriptActivityLogDestination. 
*/ - @JsonCreator public static ScriptActivityLogDestination fromString(String name) { return fromString(name, ScriptActivityLogDestination.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameter.java index d1c480fb70bf..8a2393856a9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameter.java @@ -5,41 +5,40 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Parameters of a script block. */ @Fluent -public final class ScriptActivityParameter { +public final class ScriptActivityParameter implements JsonSerializable { /* * The name of the parameter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "name") private Object name; /* * The type of the parameter. */ - @JsonProperty(value = "type") private ScriptActivityParameterType type; /* * The value of the parameter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value") private Object value; /* * The direction of the parameter. */ - @JsonProperty(value = "direction") private ScriptActivityParameterDirection direction; /* * The size of the output direction parameter. 
*/ - @JsonProperty(value = "size") private Integer size; /** @@ -155,4 +154,54 @@ public ScriptActivityParameter withSize(Integer size) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("name", this.name); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("value", this.value); + jsonWriter.writeStringField("direction", this.direction == null ? null : this.direction.toString()); + jsonWriter.writeNumberField("size", this.size); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptActivityParameter from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScriptActivityParameter if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ScriptActivityParameter. 
+ */ + public static ScriptActivityParameter fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptActivityParameter deserializedScriptActivityParameter = new ScriptActivityParameter(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedScriptActivityParameter.name = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedScriptActivityParameter.type + = ScriptActivityParameterType.fromString(reader.getString()); + } else if ("value".equals(fieldName)) { + deserializedScriptActivityParameter.value = reader.readUntyped(); + } else if ("direction".equals(fieldName)) { + deserializedScriptActivityParameter.direction + = ScriptActivityParameterDirection.fromString(reader.getString()); + } else if ("size".equals(fieldName)) { + deserializedScriptActivityParameter.size = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedScriptActivityParameter; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterDirection.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterDirection.java index bbee01ad5328..92cd2c2a91d4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterDirection.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterDirection.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public 
ScriptActivityParameterDirection() { * @param name a name to look for. * @return the corresponding ScriptActivityParameterDirection. */ - @JsonCreator public static ScriptActivityParameterDirection fromString(String name) { return fromString(name, ScriptActivityParameterDirection.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterType.java index 72f68247aade..44aba4b0ca5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityParameterType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -87,7 +86,6 @@ public ScriptActivityParameterType() { * @param name a name to look for. * @return the corresponding ScriptActivityParameterType. 
*/ - @JsonCreator public static ScriptActivityParameterType fromString(String name) { return fromString(name, ScriptActivityParameterType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java index a8c9a5eed29d..aeade7f1ffb6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityScriptBlock.java @@ -6,31 +6,32 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Script block of scripts. */ @Fluent -public final class ScriptActivityScriptBlock { +public final class ScriptActivityScriptBlock implements JsonSerializable { /* * The query text. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "text", required = true) private Object text; /* * The type of the query. Please refer to the ScriptType for valid options. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "type", required = true) private Object type; /* * Array of script parameters. Type: array. 
*/ - @JsonProperty(value = "parameters") private List parameters; /** @@ -121,4 +122,49 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScriptActivityScriptBlock.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("text", this.text); + jsonWriter.writeUntypedField("type", this.type); + jsonWriter.writeArrayField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptActivityScriptBlock from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScriptActivityScriptBlock if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScriptActivityScriptBlock. 
+ */ + public static ScriptActivityScriptBlock fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptActivityScriptBlock deserializedScriptActivityScriptBlock = new ScriptActivityScriptBlock(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("text".equals(fieldName)) { + deserializedScriptActivityScriptBlock.text = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedScriptActivityScriptBlock.type = reader.readUntyped(); + } else if ("parameters".equals(fieldName)) { + List parameters + = reader.readArray(reader1 -> ScriptActivityParameter.fromJson(reader1)); + deserializedScriptActivityScriptBlock.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedScriptActivityScriptBlock; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java index e5e3f313b283..b7459d748cc3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ScriptActivityTypePropertiesLogSettings.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Log settings of script activity. 
*/ @Fluent -public final class ScriptActivityTypePropertiesLogSettings { +public final class ScriptActivityTypePropertiesLogSettings + implements JsonSerializable { /* * The destination of logs. Type: string. */ - @JsonProperty(value = "logDestination", required = true) private ScriptActivityLogDestination logDestination; /* * Log location settings customer needs to provide when enabling log. */ - @JsonProperty(value = "logLocationSettings") private LogLocationSettings logLocationSettings; /** @@ -88,4 +91,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ScriptActivityTypePropertiesLogSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("logDestination", + this.logDestination == null ? null : this.logDestination.toString()); + jsonWriter.writeJsonField("logLocationSettings", this.logLocationSettings); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ScriptActivityTypePropertiesLogSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ScriptActivityTypePropertiesLogSettings if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ScriptActivityTypePropertiesLogSettings. 
+ */ + public static ScriptActivityTypePropertiesLogSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ScriptActivityTypePropertiesLogSettings deserializedScriptActivityTypePropertiesLogSettings + = new ScriptActivityTypePropertiesLogSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("logDestination".equals(fieldName)) { + deserializedScriptActivityTypePropertiesLogSettings.logDestination + = ScriptActivityLogDestination.fromString(reader.getString()); + } else if ("logLocationSettings".equals(fieldName)) { + deserializedScriptActivityTypePropertiesLogSettings.logLocationSettings + = LogLocationSettings.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedScriptActivityTypePropertiesLogSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java index 0257e68700a8..a4a6ac1f9450 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecretBase.java @@ -5,27 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Immutable; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; 
+import java.io.IOException; /** * The base definition of a secret type. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SecretBase.class, visible = true) -@JsonTypeName("SecretBase") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "SecureString", value = SecureString.class), - @JsonSubTypes.Type(name = "AzureKeyVaultSecret", value = AzureKeyVaultSecretReference.class) }) @Immutable -public class SecretBase { +public class SecretBase implements JsonSerializable { /* * Type of the secret. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SecretBase"; /** @@ -50,4 +43,67 @@ public String type() { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SecretBase from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SecretBase if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the SecretBase. + */ + public static SecretBase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("SecureString".equals(discriminatorValue)) { + return SecureString.fromJson(readerToUse.reset()); + } else if ("AzureKeyVaultSecret".equals(discriminatorValue)) { + return AzureKeyVaultSecretReference.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static SecretBase fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SecretBase deserializedSecretBase = new SecretBase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSecretBase.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSecretBase; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureInputOutputPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureInputOutputPolicy.java index 7d67b94f2d5c..0be9ea020c33 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureInputOutputPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureInputOutputPolicy.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Execution policy for an activity that supports secure input and output. 
*/ @Fluent -public final class SecureInputOutputPolicy { +public final class SecureInputOutputPolicy implements JsonSerializable { /* * When set to true, Input from activity is considered as secure and will not be logged to monitoring. */ - @JsonProperty(value = "secureInput") private Boolean secureInput; /* * When set to true, Output from activity is considered as secure and will not be logged to monitoring. */ - @JsonProperty(value = "secureOutput") private Boolean secureOutput; /** @@ -81,4 +83,43 @@ public SecureInputOutputPolicy withSecureOutput(Boolean secureOutput) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeBooleanField("secureInput", this.secureInput); + jsonWriter.writeBooleanField("secureOutput", this.secureOutput); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SecureInputOutputPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SecureInputOutputPolicy if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the SecureInputOutputPolicy. 
+ */ + public static SecureInputOutputPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SecureInputOutputPolicy deserializedSecureInputOutputPolicy = new SecureInputOutputPolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("secureInput".equals(fieldName)) { + deserializedSecureInputOutputPolicy.secureInput = reader.getNullable(JsonReader::getBoolean); + } else if ("secureOutput".equals(fieldName)) { + deserializedSecureInputOutputPolicy.secureOutput = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedSecureInputOutputPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java index 72a96d361ccb..39bcd5dee351 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SecureString.java @@ -6,30 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List * API calls. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SecureString.class, visible = true) -@JsonTypeName("SecureString") @Fluent public final class SecureString extends SecretBase { /* * Type of the secret. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SecureString"; /* * Value of secure string. */ - @JsonProperty(value = "value", required = true) private String value; /** @@ -83,4 +78,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SecureString.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("value", this.value); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SecureString from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SecureString if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SecureString. 
+ */ + public static SecureString fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SecureString deserializedSecureString = new SecureString(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedSecureString.value = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedSecureString.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSecureString; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java index c6bafe89a425..9e4ee894603a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfDependencyTumblingWindowTriggerReference.java @@ -6,40 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Self referenced tumbling window trigger dependency. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SelfDependencyTumblingWindowTriggerReference.class, - visible = true) -@JsonTypeName("SelfDependencyTumblingWindowTriggerReference") @Fluent public final class SelfDependencyTumblingWindowTriggerReference extends DependencyReference { /* * The type of dependency reference. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SelfDependencyTumblingWindowTriggerReference"; /* * Timespan applied to the start time of a tumbling window when evaluating dependency. */ - @JsonProperty(value = "offset", required = true) private String offset; /* * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be * used. */ - @JsonProperty(value = "size") private String size; /** @@ -116,4 +106,48 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SelfDependencyTumblingWindowTriggerReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("offset", this.offset); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("size", this.size); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfDependencyTumblingWindowTriggerReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfDependencyTumblingWindowTriggerReference if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SelfDependencyTumblingWindowTriggerReference. 
+ */ + public static SelfDependencyTumblingWindowTriggerReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfDependencyTumblingWindowTriggerReference deserializedSelfDependencyTumblingWindowTriggerReference + = new SelfDependencyTumblingWindowTriggerReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("offset".equals(fieldName)) { + deserializedSelfDependencyTumblingWindowTriggerReference.offset = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedSelfDependencyTumblingWindowTriggerReference.type = reader.getString(); + } else if ("size".equals(fieldName)) { + deserializedSelfDependencyTumblingWindowTriggerReference.size = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSelfDependencyTumblingWindowTriggerReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java index dd7003a6bdca..aa67952501b4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntime.java @@ -5,35 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Self-hosted integration runtime. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SelfHostedIntegrationRuntime.class, - visible = true) -@JsonTypeName("SelfHosted") @Fluent public final class SelfHostedIntegrationRuntime extends IntegrationRuntime { /* * Type of integration runtime. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.SELF_HOSTED; /* * When this property is not null, means this is a linked integration runtime. The property is used to access * original integration runtime. */ - @JsonProperty(value = "typeProperties") private SelfHostedIntegrationRuntimeTypeProperties innerTypeProperties; /** @@ -135,4 +128,59 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfHostedIntegrationRuntime from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfHostedIntegrationRuntime if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SelfHostedIntegrationRuntime. + */ + public static SelfHostedIntegrationRuntime fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfHostedIntegrationRuntime deserializedSelfHostedIntegrationRuntime = new SelfHostedIntegrationRuntime(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntime.withDescription(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntime.type + = IntegrationRuntimeType.fromString(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntime.innerTypeProperties + = SelfHostedIntegrationRuntimeTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSelfHostedIntegrationRuntime.withAdditionalProperties(additionalProperties); + + return deserializedSelfHostedIntegrationRuntime; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeNodeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeNodeStatus.java index e98e30660597..fc4147156d88 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeNodeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeNodeStatus.java @@ -5,7 +5,6 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -63,7 +62,6 @@ public SelfHostedIntegrationRuntimeNodeStatus() { * @param name a name to look for. * @return the corresponding SelfHostedIntegrationRuntimeNodeStatus. */ - @JsonCreator public static SelfHostedIntegrationRuntimeNodeStatus fromString(String name) { return fromString(name, SelfHostedIntegrationRuntimeNodeStatus.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java index 4f98a75fef13..983da1801de6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SelfHostedIntegrationRuntimeStatus.java @@ -6,41 +6,43 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner; import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeStatusTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.time.OffsetDateTime; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Self-hosted integration runtime 
status. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SelfHostedIntegrationRuntimeStatus.class, - visible = true) -@JsonTypeName("SelfHosted") @Fluent public final class SelfHostedIntegrationRuntimeStatus extends IntegrationRuntimeStatus { /* * Type of integration runtime. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private IntegrationRuntimeType type = IntegrationRuntimeType.SELF_HOSTED; /* * Self-hosted integration runtime status type properties. */ - @JsonProperty(value = "typeProperties", required = true) private SelfHostedIntegrationRuntimeStatusTypeProperties innerTypeProperties = new SelfHostedIntegrationRuntimeStatusTypeProperties(); + /* + * The data factory name which the integration runtime belongs to. + */ + private String dataFactoryName; + + /* + * The state of integration runtime. + */ + private IntegrationRuntimeState state; + /** * Creates an instance of SelfHostedIntegrationRuntimeStatus class. */ @@ -66,6 +68,26 @@ private SelfHostedIntegrationRuntimeStatusTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the dataFactoryName property: The data factory name which the integration runtime belongs to. + * + * @return the dataFactoryName value. + */ + @Override + public String dataFactoryName() { + return this.dataFactoryName; + } + + /** + * Get the state property: The state of integration runtime. + * + * @return the state value. + */ + @Override + public IntegrationRuntimeState state() { + return this.state; + } + /** * Get the createTime property: The time at which the integration runtime was created, in ISO8601 format. 
* @@ -273,4 +295,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SelfHostedIntegrationRuntimeStatus.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SelfHostedIntegrationRuntimeStatus from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SelfHostedIntegrationRuntimeStatus if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SelfHostedIntegrationRuntimeStatus. 
+ */ + public static SelfHostedIntegrationRuntimeStatus fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SelfHostedIntegrationRuntimeStatus deserializedSelfHostedIntegrationRuntimeStatus + = new SelfHostedIntegrationRuntimeStatus(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("dataFactoryName".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatus.dataFactoryName = reader.getString(); + } else if ("state".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatus.state + = IntegrationRuntimeState.fromString(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatus.innerTypeProperties + = SelfHostedIntegrationRuntimeStatusTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSelfHostedIntegrationRuntimeStatus.type + = IntegrationRuntimeType.fromString(reader.getString()); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSelfHostedIntegrationRuntimeStatus.withAdditionalProperties(additionalProperties); + + return deserializedSelfHostedIntegrationRuntimeStatus; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowAuthenticationType.java index 79bff181a635..7dee4dcf14e9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowAuthenticationType.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public ServiceNowAuthenticationType() { * @param name a name to look for. * @return the corresponding ServiceNowAuthenticationType. */ - @JsonCreator public static ServiceNowAuthenticationType fromString(String name) { return fromString(name, ServiceNowAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java index fc33c7158139..ae14cec24fbb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ServiceNowLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * ServiceNow server linked service. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ServiceNowLinkedService.class, - visible = true) -@JsonTypeName("ServiceNow") @Fluent public final class ServiceNowLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNow"; /* * ServiceNow server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ServiceNowLinkedServiceTypeProperties innerTypeProperties = new ServiceNowLinkedServiceTypeProperties(); /** @@ -357,4 +349,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ServiceNowLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the ServiceNowLinkedService. + */ + public static ServiceNowLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowLinkedService deserializedServiceNowLinkedService = new ServiceNowLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedServiceNowLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedServiceNowLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedServiceNowLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedServiceNowLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedServiceNowLinkedService.innerTypeProperties + = ServiceNowLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedServiceNowLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedServiceNowLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java index 929566ea4459..34fd2fec017e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * ServiceNow server dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ServiceNowObjectDataset.class, - visible = true) -@JsonTypeName("ServiceNowObject") @Fluent public final class ServiceNowObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNowObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServiceNowObjectDataset. 
+ */ + public static ServiceNowObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowObjectDataset deserializedServiceNowObjectDataset = new ServiceNowObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedServiceNowObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedServiceNowObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedServiceNowObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedServiceNowObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedServiceNowObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedServiceNowObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedServiceNowObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedServiceNowObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedServiceNowObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedServiceNowObjectDataset; + }); + 
} } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java index c476bc819291..d8f27b9d9c47 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity ServiceNow server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ServiceNowSource.class, visible = true) -@JsonTypeName("ServiceNowSource") @Fluent public final class ServiceNowSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNowSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ServiceNowSource withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ServiceNowSource. 
+ */ + public static ServiceNowSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowSource deserializedServiceNowSource = new ServiceNowSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedServiceNowSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedServiceNowSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedServiceNowSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedServiceNowSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedServiceNowSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedServiceNowSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedServiceNowSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedServiceNowSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowSource.withAdditionalProperties(additionalProperties); + + return deserializedServiceNowSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2AuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2AuthenticationType.java index 93e0836a9960..54f3aaa1a4ea 
100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2AuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2AuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public ServiceNowV2AuthenticationType() { * @param name a name to look for. * @return the corresponding ServiceNowV2AuthenticationType. */ - @JsonCreator public static ServiceNowV2AuthenticationType fromString(String name) { return fromString(name, ServiceNowV2AuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java index deab516f99b7..af0e71973eaf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ServiceNowV2LinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import 
java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * ServiceNowV2 server linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ServiceNowV2LinkedService.class, - visible = true) -@JsonTypeName("ServiceNowV2") @Fluent public final class ServiceNowV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNowV2"; /* * ServiceNowV2 server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ServiceNowV2LinkedServiceTypeProperties innerTypeProperties = new ServiceNowV2LinkedServiceTypeProperties(); /** @@ -305,4 +297,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ServiceNowV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ServiceNowV2LinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServiceNowV2LinkedService. + */ + public static ServiceNowV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowV2LinkedService deserializedServiceNowV2LinkedService = new ServiceNowV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedServiceNowV2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedServiceNowV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedServiceNowV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedServiceNowV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedServiceNowV2LinkedService.innerTypeProperties + = ServiceNowV2LinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedServiceNowV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedServiceNowV2LinkedService; + 
}); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java index e357a2890f03..51f2194eb5e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2ObjectDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * ServiceNowV2 server dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ServiceNowV2ObjectDataset.class, - visible = true) -@JsonTypeName("ServiceNowV2Object") @Fluent public final class ServiceNowV2ObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNowV2Object"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -160,4 +152,81 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowV2ObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowV2ObjectDataset if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServiceNowV2ObjectDataset. 
+ */ + public static ServiceNowV2ObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowV2ObjectDataset deserializedServiceNowV2ObjectDataset = new ServiceNowV2ObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedServiceNowV2ObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedServiceNowV2ObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedServiceNowV2ObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowV2ObjectDataset.withAdditionalProperties(additionalProperties); + + return 
deserializedServiceNowV2ObjectDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java index 99f5478cf813..a4266ea79306 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServiceNowV2Source.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity ServiceNowV2 server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ServiceNowV2Source.class, visible = true) -@JsonTypeName("ServiceNowV2Source") @Fluent public final class ServiceNowV2Source extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServiceNowV2Source"; /* * Expression to filter data from source. 
*/ - @JsonProperty(value = "expression") private ExpressionV2 expression; /** @@ -132,4 +129,72 @@ public void validate() { expression().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("expression", this.expression); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServiceNowV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServiceNowV2Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ServiceNowV2Source. 
+ */ + public static ServiceNowV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServiceNowV2Source deserializedServiceNowV2Source = new ServiceNowV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedServiceNowV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedServiceNowV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedServiceNowV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedServiceNowV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedServiceNowV2Source.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedServiceNowV2Source.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedServiceNowV2Source.type = reader.getString(); + } else if ("expression".equals(fieldName)) { + deserializedServiceNowV2Source.expression = ExpressionV2.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServiceNowV2Source.withAdditionalProperties(additionalProperties); + + return deserializedServiceNowV2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java index 23e31b0174e4..e6213da33588 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ServicePrincipalCredential.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ServicePrincipalCredentialTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Service principal credential. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = ServicePrincipalCredential.class, - visible = true) -@JsonTypeName("ServicePrincipal") @Fluent public final class ServicePrincipalCredential extends Credential { /* * Type of credential. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ServicePrincipal"; /* * Service Principal credential properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private ServicePrincipalCredentialTypeProperties innerTypeProperties = new ServicePrincipalCredentialTypeProperties(); @@ -168,4 +161,63 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ServicePrincipalCredential.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ServicePrincipalCredential from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ServicePrincipalCredential if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ServicePrincipalCredential. 
+ */ + public static ServicePrincipalCredential fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ServicePrincipalCredential deserializedServicePrincipalCredential = new ServicePrincipalCredential(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedServicePrincipalCredential.withDescription(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedServicePrincipalCredential.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedServicePrincipalCredential.innerTypeProperties + = ServicePrincipalCredentialTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedServicePrincipalCredential.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedServicePrincipalCredential.withAdditionalProperties(additionalProperties); + + return deserializedServicePrincipalCredential; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java index 9499d79bce5e..e443d71bcee8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SetVariableActivity.java @@ -6,37 +6,33 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SetVariableActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Set value for a Variable. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SetVariableActivity.class, visible = true) -@JsonTypeName("SetVariable") @Fluent public final class SetVariableActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SetVariable"; /* * Set Variable activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private SetVariableActivityTypeProperties innerTypeProperties = new SetVariableActivityTypeProperties(); /* * Activity policy. */ - @JsonProperty(value = "policy") private SecureInputOutputPolicy policy; /** @@ -228,4 +224,82 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SetVariableActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("policy", this.policy); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SetVariableActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SetVariableActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SetVariableActivity. 
+ */ + public static SetVariableActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SetVariableActivity deserializedSetVariableActivity = new SetVariableActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSetVariableActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSetVariableActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedSetVariableActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedSetVariableActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedSetVariableActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedSetVariableActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedSetVariableActivity.innerTypeProperties + = SetVariableActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSetVariableActivity.type = reader.getString(); + } else if ("policy".equals(fieldName)) { + deserializedSetVariableActivity.policy = SecureInputOutputPolicy.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSetVariableActivity.withAdditionalProperties(additionalProperties); + + return deserializedSetVariableActivity; + }); + } } 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpAuthenticationType.java index 66fe44760e4f..630f6606745c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public SftpAuthenticationType() { * @param name a name to look for. * @return the corresponding SftpAuthenticationType. */ - @JsonCreator public static SftpAuthenticationType fromString(String name) { return fromString(name, SftpAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java index 3a472f5c10e9..83afae995d1e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpLocation.java @@ -5,23 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The location of SFTP dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpLocation.class, visible = true) -@JsonTypeName("SftpLocation") @Fluent public final class SftpLocation extends DatasetLocation { /* * Type of dataset storage location. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SftpLocation"; /** @@ -67,4 +65,57 @@ public SftpLocation withFileName(Object fileName) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("folderPath", folderPath()); + jsonWriter.writeUntypedField("fileName", fileName()); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SftpLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SftpLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SftpLocation. 
+ */ + public static SftpLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SftpLocation deserializedSftpLocation = new SftpLocation(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("folderPath".equals(fieldName)) { + deserializedSftpLocation.withFolderPath(reader.readUntyped()); + } else if ("fileName".equals(fieldName)) { + deserializedSftpLocation.withFileName(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSftpLocation.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSftpLocation.withAdditionalProperties(additionalProperties); + + return deserializedSftpLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java index 07894453f813..4de0358c281d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpReadSettings.java @@ -5,87 +5,75 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import 
java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Sftp read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpReadSettings.class, visible = true) -@JsonTypeName("SftpReadSettings") @Fluent public final class SftpReadSettings extends StoreReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SftpReadSettings"; /* * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "recursive") private Object recursive; /* * Sftp wildcardFolderPath. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFolderPath") private Object wildcardFolderPath; /* * Sftp wildcardFileName. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "wildcardFileName") private Object wildcardFileName; /* * Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enablePartitionDiscovery") private Object enablePartitionDiscovery; /* * Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionRootPath") private Object partitionRootPath; /* * Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to * copy. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "fileListPath") private Object fileListPath; /* * Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "deleteFilesAfterCompletion") private Object deleteFilesAfterCompletion; /* * The start of file's modified datetime. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeStart") private Object modifiedDatetimeStart; /* * The end of file's modified datetime. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "modifiedDatetimeEnd") private Object modifiedDatetimeEnd; /* * If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "disableChunking") private Object disableChunking; /** @@ -349,4 +337,87 @@ public SftpReadSettings withDisableMetricsCollection(Object disableMetricsCollec public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("recursive", this.recursive); + jsonWriter.writeUntypedField("wildcardFolderPath", this.wildcardFolderPath); + jsonWriter.writeUntypedField("wildcardFileName", this.wildcardFileName); + jsonWriter.writeUntypedField("enablePartitionDiscovery", this.enablePartitionDiscovery); + jsonWriter.writeUntypedField("partitionRootPath", this.partitionRootPath); + jsonWriter.writeUntypedField("fileListPath", this.fileListPath); + jsonWriter.writeUntypedField("deleteFilesAfterCompletion", this.deleteFilesAfterCompletion); + jsonWriter.writeUntypedField("modifiedDatetimeStart", this.modifiedDatetimeStart); + jsonWriter.writeUntypedField("modifiedDatetimeEnd", this.modifiedDatetimeEnd); + jsonWriter.writeUntypedField("disableChunking", this.disableChunking); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SftpReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SftpReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SftpReadSettings. + */ + public static SftpReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SftpReadSettings deserializedSftpReadSettings = new SftpReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSftpReadSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSftpReadSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSftpReadSettings.type = reader.getString(); + } else if ("recursive".equals(fieldName)) { + deserializedSftpReadSettings.recursive = reader.readUntyped(); + } else if ("wildcardFolderPath".equals(fieldName)) { + deserializedSftpReadSettings.wildcardFolderPath = reader.readUntyped(); + } else if ("wildcardFileName".equals(fieldName)) { + deserializedSftpReadSettings.wildcardFileName = reader.readUntyped(); + } else if ("enablePartitionDiscovery".equals(fieldName)) { + deserializedSftpReadSettings.enablePartitionDiscovery = reader.readUntyped(); + } else if ("partitionRootPath".equals(fieldName)) { + deserializedSftpReadSettings.partitionRootPath = reader.readUntyped(); + } else if ("fileListPath".equals(fieldName)) { + deserializedSftpReadSettings.fileListPath = reader.readUntyped(); + } else if 
("deleteFilesAfterCompletion".equals(fieldName)) { + deserializedSftpReadSettings.deleteFilesAfterCompletion = reader.readUntyped(); + } else if ("modifiedDatetimeStart".equals(fieldName)) { + deserializedSftpReadSettings.modifiedDatetimeStart = reader.readUntyped(); + } else if ("modifiedDatetimeEnd".equals(fieldName)) { + deserializedSftpReadSettings.modifiedDatetimeEnd = reader.readUntyped(); + } else if ("disableChunking".equals(fieldName)) { + deserializedSftpReadSettings.disableChunking = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSftpReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedSftpReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java index 57a342a721cf..14f4d9863aee 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpServerLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SftpServerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import 
java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * A linked service for an SSH File Transfer Protocol (SFTP) server. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SftpServerLinkedService.class, - visible = true) -@JsonTypeName("Sftp") @Fluent public final class SftpServerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Sftp"; /* * Properties specific to this linked service type. */ - @JsonProperty(value = "typeProperties", required = true) private SftpServerLinkedServiceTypeProperties innerTypeProperties = new SftpServerLinkedServiceTypeProperties(); /** @@ -390,4 +382,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SftpServerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SftpServerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SftpServerLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SftpServerLinkedService. + */ + public static SftpServerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SftpServerLinkedService deserializedSftpServerLinkedService = new SftpServerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSftpServerLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSftpServerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSftpServerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSftpServerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSftpServerLinkedService.innerTypeProperties + = SftpServerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSftpServerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSftpServerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSftpServerLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java index 7c18c505ee06..774c0d73e36f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SftpWriteSettings.java @@ -5,38 +5,34 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Sftp write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SftpWriteSettings.class, visible = true) -@JsonTypeName("SftpWriteSettings") @Fluent public final class SftpWriteSettings extends StoreWriteSettings { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SftpWriteSettings"; /* * Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "operationTimeout") private Object operationTimeout; /* * Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. * Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "useTempFileRename") private Object useTempFileRename; /** @@ -144,4 +140,70 @@ public SftpWriteSettings withMetadata(List metadata) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("copyBehavior", copyBehavior()); + jsonWriter.writeArrayField("metadata", metadata(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("operationTimeout", this.operationTimeout); + jsonWriter.writeUntypedField("useTempFileRename", this.useTempFileRename); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SftpWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SftpWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SftpWriteSettings. 
+ */ + public static SftpWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SftpWriteSettings deserializedSftpWriteSettings = new SftpWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSftpWriteSettings.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSftpWriteSettings.withDisableMetricsCollection(reader.readUntyped()); + } else if ("copyBehavior".equals(fieldName)) { + deserializedSftpWriteSettings.withCopyBehavior(reader.readUntyped()); + } else if ("metadata".equals(fieldName)) { + List metadata = reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedSftpWriteSettings.withMetadata(metadata); + } else if ("type".equals(fieldName)) { + deserializedSftpWriteSettings.type = reader.getString(); + } else if ("operationTimeout".equals(fieldName)) { + deserializedSftpWriteSettings.operationTimeout = reader.readUntyped(); + } else if ("useTempFileRename".equals(fieldName)) { + deserializedSftpWriteSettings.useTempFileRename = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSftpWriteSettings.withAdditionalProperties(additionalProperties); + + return deserializedSftpWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java index 420ed487b908..17a6500211f0 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SharePoint Online List linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SharePointOnlineListLinkedService.class, - visible = true) -@JsonTypeName("SharePointOnlineList") @Fluent public final class SharePointOnlineListLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SharePointOnlineList"; /* * SharePoint Online List linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SharePointOnlineListLinkedServiceTypeProperties innerTypeProperties = new SharePointOnlineListLinkedServiceTypeProperties(); @@ -245,4 +237,73 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SharePointOnlineListLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SharePointOnlineListLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SharePointOnlineListLinkedService if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SharePointOnlineListLinkedService. 
+ */ + public static SharePointOnlineListLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SharePointOnlineListLinkedService deserializedSharePointOnlineListLinkedService + = new SharePointOnlineListLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSharePointOnlineListLinkedService + .withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSharePointOnlineListLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSharePointOnlineListLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSharePointOnlineListLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSharePointOnlineListLinkedService.innerTypeProperties + = SharePointOnlineListLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSharePointOnlineListLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSharePointOnlineListLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSharePointOnlineListLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java index c103ff1efdad..726d4fc54089 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListResourceDataset.java @@ -5,36 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The sharepoint online list resource dataset. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SharePointOnlineListResourceDataset.class, - visible = true) -@JsonTypeName("SharePointOnlineListResource") @Fluent public final class SharePointOnlineListResourceDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SharePointOnlineListResource"; /* * Sharepoint online list dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SharePointOnlineListDatasetTypeProperties innerTypeProperties; /** @@ -162,4 +154,82 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SharePointOnlineListResourceDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SharePointOnlineListResourceDataset if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SharePointOnlineListResourceDataset. 
+ */ + public static SharePointOnlineListResourceDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SharePointOnlineListResourceDataset deserializedSharePointOnlineListResourceDataset + = new SharePointOnlineListResourceDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSharePointOnlineListResourceDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSharePointOnlineListResourceDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSharePointOnlineListResourceDataset.innerTypeProperties + = SharePointOnlineListDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, 
reader.readUntyped()); + } + } + deserializedSharePointOnlineListResourceDataset.withAdditionalProperties(additionalProperties); + + return deserializedSharePointOnlineListResourceDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java index d7c2a66d2026..2f9e06504a9b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SharePointOnlineListSource.java @@ -5,41 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for sharePoint online list source. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SharePointOnlineListSource.class, - visible = true) -@JsonTypeName("SharePointOnlineListSource") @Fluent public final class SharePointOnlineListSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SharePointOnlineListSource"; /* * The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /* * The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or * Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "httpRequestTimeout") private Object httpRequestTimeout; /** @@ -149,4 +141,69 @@ public SharePointOnlineListSource withDisableMetricsCollection(Object disableMet public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("httpRequestTimeout", this.httpRequestTimeout); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SharePointOnlineListSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SharePointOnlineListSource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SharePointOnlineListSource. 
+ */ + public static SharePointOnlineListSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SharePointOnlineListSource deserializedSharePointOnlineListSource = new SharePointOnlineListSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSharePointOnlineListSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSharePointOnlineListSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSharePointOnlineListSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSharePointOnlineListSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSharePointOnlineListSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSharePointOnlineListSource.query = reader.readUntyped(); + } else if ("httpRequestTimeout".equals(fieldName)) { + deserializedSharePointOnlineListSource.httpRequestTimeout = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSharePointOnlineListSource.withAdditionalProperties(additionalProperties); + + return deserializedSharePointOnlineListSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java index f6eff44d1c06..60e0b7451eaa 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ShopifyLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Shopify Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifyLinkedService.class, visible = true) -@JsonTypeName("Shopify") @Fluent public final class ShopifyLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Shopify"; /* * Shopify Service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private ShopifyLinkedServiceTypeProperties innerTypeProperties = new ShopifyLinkedServiceTypeProperties(); /** @@ -261,4 +257,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ShopifyLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ShopifyLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ShopifyLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ShopifyLinkedService. 
+ */ + public static ShopifyLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ShopifyLinkedService deserializedShopifyLinkedService = new ShopifyLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedShopifyLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedShopifyLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedShopifyLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedShopifyLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedShopifyLinkedService.innerTypeProperties + = ShopifyLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedShopifyLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedShopifyLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedShopifyLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java index efb2611d16ae..201dbbfaa0c6 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifyObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Shopify Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifyObjectDataset.class, visible = true) -@JsonTypeName("ShopifyObject") @Fluent public final class ShopifyObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ShopifyObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ShopifyObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ShopifyObjectDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ShopifyObjectDataset. 
+ */ + public static ShopifyObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ShopifyObjectDataset deserializedShopifyObjectDataset = new ShopifyObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedShopifyObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedShopifyObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedShopifyObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedShopifyObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedShopifyObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedShopifyObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedShopifyObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedShopifyObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedShopifyObjectDataset.innerTypeProperties + = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedShopifyObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedShopifyObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java index 99c45c609ab2..656c54604fdf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ShopifySource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Shopify Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ShopifySource.class, visible = true) -@JsonTypeName("ShopifySource") @Fluent public final class ShopifySource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ShopifySource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ShopifySource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ShopifySource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ShopifySource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ShopifySource. 
+ */ + public static ShopifySource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ShopifySource deserializedShopifySource = new ShopifySource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedShopifySource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedShopifySource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedShopifySource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedShopifySource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedShopifySource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedShopifySource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedShopifySource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedShopifySource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedShopifySource.withAdditionalProperties(additionalProperties); + + return deserializedShopifySource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java index f614fdae41f2..9dd489a28e05 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SkipErrorFile.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Skip error file. */ @Fluent -public final class SkipErrorFile { +public final class SkipErrorFile implements JsonSerializable { /* * Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "fileMissing") private Object fileMissing; /* * Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "dataInconsistency") private Object dataInconsistency; /** @@ -83,4 +85,43 @@ public SkipErrorFile withDataInconsistency(Object dataInconsistency) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("fileMissing", this.fileMissing); + jsonWriter.writeUntypedField("dataInconsistency", this.dataInconsistency); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SkipErrorFile from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SkipErrorFile if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SkipErrorFile. 
+ */ + public static SkipErrorFile fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SkipErrorFile deserializedSkipErrorFile = new SkipErrorFile(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("fileMissing".equals(fieldName)) { + deserializedSkipErrorFile.fileMissing = reader.readUntyped(); + } else if ("dataInconsistency".equals(fieldName)) { + deserializedSkipErrorFile.dataInconsistency = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSkipErrorFile; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java index 91dace1e278f..127f124d9fbf 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SmartsheetLinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SmartsheetLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Smartsheet. 
*/ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SmartsheetLinkedService.class, - visible = true) -@JsonTypeName("Smartsheet") @Fluent public final class SmartsheetLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Smartsheet"; /* * Smartsheet linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private SmartsheetLinkedServiceTypeProperties innerTypeProperties = new SmartsheetLinkedServiceTypeProperties(); /** @@ -165,4 +157,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SmartsheetLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SmartsheetLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SmartsheetLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the SmartsheetLinkedService. + */ + public static SmartsheetLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SmartsheetLinkedService deserializedSmartsheetLinkedService = new SmartsheetLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSmartsheetLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSmartsheetLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSmartsheetLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSmartsheetLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSmartsheetLinkedService.innerTypeProperties + = SmartsheetLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSmartsheetLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSmartsheetLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSmartsheetLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeAuthenticationType.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeAuthenticationType.java index 2ec121f192a2..9d4f66523846 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public SnowflakeAuthenticationType() { * @param name a name to look for. * @return the corresponding SnowflakeAuthenticationType. */ - @JsonCreator public static SnowflakeAuthenticationType fromString(String name) { return fromString(name, SnowflakeAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java index d848b7e19d44..2299f8e006be 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The snowflake dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeDataset.class, visible = true) -@JsonTypeName("SnowflakeTable") @Fluent public final class SnowflakeDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeTable"; /* * Snowflake dataset properties. */ - @JsonProperty(value = "typeProperties", required = true) private SnowflakeDatasetTypeProperties innerTypeProperties = new SnowflakeDatasetTypeProperties(); /** @@ -190,4 +186,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeDataset 
from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeDataset. + */ + public static SnowflakeDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeDataset deserializedSnowflakeDataset = new SnowflakeDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSnowflakeDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSnowflakeDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSnowflakeDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSnowflakeDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSnowflakeDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSnowflakeDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSnowflakeDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSnowflakeDataset.innerTypeProperties = SnowflakeDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeDataset.type = reader.getString(); + } 
else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeDataset.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java index 218f035ebfb4..6b75dab68484 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeExportCopyCommand.java @@ -5,29 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.Map; /** * Snowflake export command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SnowflakeExportCopyCommand.class, - visible = true) -@JsonTypeName("SnowflakeExportCopyCommand") @Fluent public final class SnowflakeExportCopyCommand extends ExportSettings { /* * The export setting type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeExportCopyCommand"; /* @@ -35,8 +27,6 @@ public final class SnowflakeExportCopyCommand extends ExportSettings { * type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", * "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ - @JsonProperty(value = "additionalCopyOptions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalCopyOptions; /* @@ -44,15 +34,12 @@ public final class SnowflakeExportCopyCommand extends ExportSettings { * string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", * "MAX_FILE_SIZE": "'FALSE'" } */ - @JsonProperty(value = "additionalFormatOptions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalFormatOptions; /* * The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with * resultType string). 
*/ - @JsonProperty(value = "storageIntegration") private Object storageIntegration; /** @@ -150,4 +137,64 @@ public SnowflakeExportCopyCommand withStorageIntegration(Object storageIntegrati public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeMapField("additionalCopyOptions", this.additionalCopyOptions, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("additionalFormatOptions", this.additionalFormatOptions, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("storageIntegration", this.storageIntegration); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeExportCopyCommand from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeExportCopyCommand if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SnowflakeExportCopyCommand. 
+ */ + public static SnowflakeExportCopyCommand fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeExportCopyCommand deserializedSnowflakeExportCopyCommand = new SnowflakeExportCopyCommand(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSnowflakeExportCopyCommand.type = reader.getString(); + } else if ("additionalCopyOptions".equals(fieldName)) { + Map additionalCopyOptions = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSnowflakeExportCopyCommand.additionalCopyOptions = additionalCopyOptions; + } else if ("additionalFormatOptions".equals(fieldName)) { + Map additionalFormatOptions = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSnowflakeExportCopyCommand.additionalFormatOptions = additionalFormatOptions; + } else if ("storageIntegration".equals(fieldName)) { + deserializedSnowflakeExportCopyCommand.storageIntegration = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeExportCopyCommand.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeExportCopyCommand; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java index 062622264b89..509767daa976 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeImportCopyCommand.java @@ -5,29 +5,21 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.Map; /** * Snowflake import command settings. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SnowflakeImportCopyCommand.class, - visible = true) -@JsonTypeName("SnowflakeImportCopyCommand") @Fluent public final class SnowflakeImportCopyCommand extends ImportSettings { /* * The import setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeImportCopyCommand"; /* @@ -35,8 +27,6 @@ public final class SnowflakeImportCopyCommand extends ImportSettings { * type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", * "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ - @JsonProperty(value = "additionalCopyOptions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalCopyOptions; /* @@ -44,15 +34,12 @@ public final class SnowflakeImportCopyCommand extends ImportSettings { * string type) (or Expression with resultType object). 
Example: "additionalFormatOptions": { "FORCE": "TRUE", * "LOAD_UNCERTAIN_FILES": "'FALSE'" } */ - @JsonProperty(value = "additionalFormatOptions") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map additionalFormatOptions; /* * The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "storageIntegration") private Object storageIntegration; /** @@ -150,4 +137,64 @@ public SnowflakeImportCopyCommand withStorageIntegration(Object storageIntegrati public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeMapField("additionalCopyOptions", this.additionalCopyOptions, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeMapField("additionalFormatOptions", this.additionalFormatOptions, + (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeUntypedField("storageIntegration", this.storageIntegration); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeImportCopyCommand from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeImportCopyCommand if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SnowflakeImportCopyCommand. 
+ */ + public static SnowflakeImportCopyCommand fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeImportCopyCommand deserializedSnowflakeImportCopyCommand = new SnowflakeImportCopyCommand(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSnowflakeImportCopyCommand.type = reader.getString(); + } else if ("additionalCopyOptions".equals(fieldName)) { + Map additionalCopyOptions = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSnowflakeImportCopyCommand.additionalCopyOptions = additionalCopyOptions; + } else if ("additionalFormatOptions".equals(fieldName)) { + Map additionalFormatOptions = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedSnowflakeImportCopyCommand.additionalFormatOptions = additionalFormatOptions; + } else if ("storageIntegration".equals(fieldName)) { + deserializedSnowflakeImportCopyCommand.storageIntegration = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeImportCopyCommand.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeImportCopyCommand; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java index c82cf68e032b..8e437842f215 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Snowflake linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeLinkedService.class, visible = true) -@JsonTypeName("Snowflake") @Fluent public final class SnowflakeLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Snowflake"; /* * Snowflake linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SnowflakeLinkedServiceTypeProperties innerTypeProperties = new SnowflakeLinkedServiceTypeProperties(); /** @@ -184,4 +180,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeLinkedService. 
+ */ + public static SnowflakeLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeLinkedService deserializedSnowflakeLinkedService = new SnowflakeLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSnowflakeLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSnowflakeLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSnowflakeLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSnowflakeLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSnowflakeLinkedService.innerTypeProperties + = SnowflakeLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java index 317d88dd5cb6..ea8338b74de7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity snowflake sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeSink.class, visible = true) -@JsonTypeName("SnowflakeSink") @Fluent public final class SnowflakeSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeSink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * Snowflake import settings. 
*/ - @JsonProperty(value = "importSettings") private SnowflakeImportCopyCommand importSettings; /** @@ -158,4 +154,75 @@ public void validate() { importSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeJsonField("importSettings", this.importSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SnowflakeSink. 
+ */ + public static SnowflakeSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeSink deserializedSnowflakeSink = new SnowflakeSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSnowflakeSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSnowflakeSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSnowflakeSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSnowflakeSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSnowflakeSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSnowflakeSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSnowflakeSink.preCopyScript = reader.readUntyped(); + } else if ("importSettings".equals(fieldName)) { + deserializedSnowflakeSink.importSettings = SnowflakeImportCopyCommand.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeSink.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java index 067540e0defb..4dfcb36c60a4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeSource.java @@ -6,35 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity snowflake source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeSource.class, visible = true) -@JsonTypeName("SnowflakeSource") @Fluent public final class SnowflakeSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeSource"; /* * Snowflake Sql query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Snowflake export settings. 
*/ - @JsonProperty(value = "exportSettings", required = true) private SnowflakeExportCopyCommand exportSettings; /** @@ -146,4 +142,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeSource.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeJsonField("exportSettings", this.exportSettings); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeSource. 
+ */ + public static SnowflakeSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeSource deserializedSnowflakeSource = new SnowflakeSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSnowflakeSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSnowflakeSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSnowflakeSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSnowflakeSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("exportSettings".equals(fieldName)) { + deserializedSnowflakeSource.exportSettings = SnowflakeExportCopyCommand.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSnowflakeSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeSource.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java index 8f1982f510b0..3b9d3f1119ab 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java 
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Dataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The snowflake dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Dataset.class, visible = true) -@JsonTypeName("SnowflakeV2Table") @Fluent public final class SnowflakeV2Dataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeV2Table"; /* * Snowflake dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SnowflakeDatasetTypeProperties innerTypeProperties = new SnowflakeDatasetTypeProperties(); /** @@ -190,4 +186,80 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeV2Dataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeV2Dataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeV2Dataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeV2Dataset. 
+ */ + public static SnowflakeV2Dataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeV2Dataset deserializedSnowflakeV2Dataset = new SnowflakeV2Dataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSnowflakeV2Dataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSnowflakeV2Dataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSnowflakeV2Dataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSnowflakeV2Dataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSnowflakeV2Dataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSnowflakeV2Dataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSnowflakeV2Dataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSnowflakeV2Dataset.innerTypeProperties + = SnowflakeDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeV2Dataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeV2Dataset.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeV2Dataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java index 32f34fa22eee..1fbfad9d6eb1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2LinkedService.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedV2ServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Snowflake linked service. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SnowflakeV2LinkedService.class, - visible = true) -@JsonTypeName("SnowflakeV2") @Fluent public final class SnowflakeV2LinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeV2"; /* * Snowflake linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SnowflakeLinkedV2ServiceTypeProperties innerTypeProperties = new SnowflakeLinkedV2ServiceTypeProperties(); /** @@ -430,4 +422,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeV2LinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeV2LinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeV2LinkedService if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeV2LinkedService. 
+ */ + public static SnowflakeV2LinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeV2LinkedService deserializedSnowflakeV2LinkedService = new SnowflakeV2LinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSnowflakeV2LinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSnowflakeV2LinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSnowflakeV2LinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSnowflakeV2LinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSnowflakeV2LinkedService.innerTypeProperties + = SnowflakeLinkedV2ServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeV2LinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeV2LinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeV2LinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java index f879e9d7d47f..98f0d50391b9 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Sink.java @@ -5,35 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity snowflake sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Sink.class, visible = true) -@JsonTypeName("SnowflakeV2Sink") @Fluent public final class SnowflakeV2Sink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeV2Sink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * Snowflake import settings. 
*/ - @JsonProperty(value = "importSettings") private SnowflakeImportCopyCommand importSettings; /** @@ -158,4 +154,75 @@ public void validate() { importSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeJsonField("importSettings", this.importSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeV2Sink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeV2Sink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SnowflakeV2Sink. 
+ */ + public static SnowflakeV2Sink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeV2Sink deserializedSnowflakeV2Sink = new SnowflakeV2Sink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSnowflakeV2Sink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSnowflakeV2Sink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSnowflakeV2Sink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSnowflakeV2Sink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSnowflakeV2Sink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSnowflakeV2Sink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeV2Sink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSnowflakeV2Sink.preCopyScript = reader.readUntyped(); + } else if ("importSettings".equals(fieldName)) { + deserializedSnowflakeV2Sink.importSettings = SnowflakeImportCopyCommand.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeV2Sink.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeV2Sink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java index 1139a32014c4..d7e53901c47a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SnowflakeV2Source.java @@ -6,35 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity snowflake source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SnowflakeV2Source.class, visible = true) -@JsonTypeName("SnowflakeV2Source") @Fluent public final class SnowflakeV2Source extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SnowflakeV2Source"; /* * Snowflake Sql query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * Snowflake export settings. 
*/ - @JsonProperty(value = "exportSettings", required = true) private SnowflakeExportCopyCommand exportSettings; /** @@ -147,4 +143,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SnowflakeV2Source.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeJsonField("exportSettings", this.exportSettings); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SnowflakeV2Source from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SnowflakeV2Source if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SnowflakeV2Source. 
+ */ + public static SnowflakeV2Source fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SnowflakeV2Source deserializedSnowflakeV2Source = new SnowflakeV2Source(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSnowflakeV2Source.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSnowflakeV2Source.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSnowflakeV2Source.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSnowflakeV2Source.withDisableMetricsCollection(reader.readUntyped()); + } else if ("exportSettings".equals(fieldName)) { + deserializedSnowflakeV2Source.exportSettings = SnowflakeExportCopyCommand.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSnowflakeV2Source.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSnowflakeV2Source.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSnowflakeV2Source.withAdditionalProperties(additionalProperties); + + return deserializedSnowflakeV2Source; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkAuthenticationType.java index 4e45e6e2c0e0..8435339bd8be 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -48,7 +47,6 @@ public SparkAuthenticationType() { * @param name a name to look for. * @return the corresponding SparkAuthenticationType. */ - @JsonCreator public static SparkAuthenticationType fromString(String name) { return fromString(name, SparkAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java index dd4bc92ff0e7..040530c57dc0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationParametrizationReference.java @@ -6,23 +6,26 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Spark configuration reference. 
*/ @Fluent -public final class SparkConfigurationParametrizationReference { +public final class SparkConfigurationParametrizationReference + implements JsonSerializable { /* * Spark configuration reference type. */ - @JsonProperty(value = "type", required = true) private SparkConfigurationReferenceType type; /* * Reference spark configuration name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "referenceName", required = true) private Object referenceName; /** @@ -92,4 +95,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SparkConfigurationParametrizationReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkConfigurationParametrizationReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SparkConfigurationParametrizationReference if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SparkConfigurationParametrizationReference. 
+ */ + public static SparkConfigurationParametrizationReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkConfigurationParametrizationReference deserializedSparkConfigurationParametrizationReference + = new SparkConfigurationParametrizationReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSparkConfigurationParametrizationReference.type + = SparkConfigurationReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedSparkConfigurationParametrizationReference.referenceName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSparkConfigurationParametrizationReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java index d87e49ad6d0d..bc10f84800c4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkConfigurationReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -33,7 +32,6 @@ public SparkConfigurationReferenceType() { * @param name a name to look for. * @return the corresponding SparkConfigurationReferenceType. 
*/ - @JsonCreator public static SparkConfigurationReferenceType fromString(String name) { return fromString(name, SparkConfigurationReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java index 3f8036865f13..c696f71972a1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -33,7 +32,6 @@ public SparkJobReferenceType() { * @param name a name to look for. * @return the corresponding SparkJobReferenceType. 
*/ - @JsonCreator public static SparkJobReferenceType fromString(String name) { return fromString(name, SparkJobReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java index b8838b9a7dff..159c3397a138 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SparkLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Spark Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkLinkedService.class, visible = true) -@JsonTypeName("Spark") @Fluent public final class SparkLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Spark"; /* * Spark Server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SparkLinkedServiceTypeProperties innerTypeProperties = new SparkLinkedServiceTypeProperties(); /** @@ -449,4 +445,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SparkLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SparkLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SparkLinkedService. 
+ */ + public static SparkLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkLinkedService deserializedSparkLinkedService = new SparkLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSparkLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSparkLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSparkLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSparkLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSparkLinkedService.innerTypeProperties + = SparkLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSparkLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSparkLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSparkLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java index 860bb4031aa8..9d10a247eaef 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SparkDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Spark Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkObjectDataset.class, visible = true) -@JsonTypeName("SparkObject") @Fluent public final class SparkObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SparkObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private SparkDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SparkObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SparkObjectDataset. 
+ */ + public static SparkObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkObjectDataset deserializedSparkObjectDataset = new SparkObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSparkObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSparkObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSparkObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSparkObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSparkObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSparkObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSparkObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSparkObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSparkObjectDataset.innerTypeProperties = SparkDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSparkObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSparkObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkServerType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkServerType.java index 14ef6599e9f5..de2d52b7b55b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkServerType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkServerType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public SparkServerType() { * @param name a name to look for. * @return the corresponding SparkServerType. */ - @JsonCreator public static SparkServerType fromString(String name) { return fromString(name, SparkServerType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java index 6a64833daf85..485ae3477e2a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Spark Server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SparkSource.class, visible = true) -@JsonTypeName("SparkSource") @Fluent public final class SparkSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SparkSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public SparkSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SparkSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SparkSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SparkSource. + */ + public static SparkSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SparkSource deserializedSparkSource = new SparkSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSparkSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSparkSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSparkSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSparkSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSparkSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSparkSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSparkSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSparkSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSparkSource.withAdditionalProperties(additionalProperties); + + return deserializedSparkSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkThriftTransportProtocol.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkThriftTransportProtocol.java index 3309cf0929c8..21623f986ae5 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkThriftTransportProtocol.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkThriftTransportProtocol.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public SparkThriftTransportProtocol() { * @param name a name to look for. * @return the corresponding SparkThriftTransportProtocol. */ - @JsonCreator public static SparkThriftTransportProtocol fromString(String name) { return fromString(name, SparkThriftTransportProtocol.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedAkvAuthType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedAkvAuthType.java index 021ed14548c8..869f29d2b40b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedAkvAuthType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedAkvAuthType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -43,7 +42,6 @@ public SqlAlwaysEncryptedAkvAuthType() { * @param name a name to look for. * @return the corresponding SqlAlwaysEncryptedAkvAuthType. 
*/ - @JsonCreator public static SqlAlwaysEncryptedAkvAuthType fromString(String name) { return fromString(name, SqlAlwaysEncryptedAkvAuthType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java index c76714e73643..ecf56023116a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlAlwaysEncryptedProperties.java @@ -6,36 +6,36 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sql always encrypted properties. */ @Fluent -public final class SqlAlwaysEncryptedProperties { +public final class SqlAlwaysEncryptedProperties implements JsonSerializable { /* * Sql always encrypted AKV authentication type. Type: string. */ - @JsonProperty(value = "alwaysEncryptedAkvAuthType", required = true) private SqlAlwaysEncryptedAkvAuthType alwaysEncryptedAkvAuthType; /* * The client ID of the application in Azure Active Directory used for Azure Key Vault authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "servicePrincipalId") private Object servicePrincipalId; /* * The key of the service principal used to authenticate against Azure Key Vault. */ - @JsonProperty(value = "servicePrincipalKey") private SecretBase servicePrincipalKey; /* * The credential reference containing authentication information. 
*/ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -149,4 +149,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SqlAlwaysEncryptedProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("alwaysEncryptedAkvAuthType", + this.alwaysEncryptedAkvAuthType == null ? null : this.alwaysEncryptedAkvAuthType.toString()); + jsonWriter.writeUntypedField("servicePrincipalId", this.servicePrincipalId); + jsonWriter.writeJsonField("servicePrincipalKey", this.servicePrincipalKey); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlAlwaysEncryptedProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlAlwaysEncryptedProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SqlAlwaysEncryptedProperties. 
+ */ + public static SqlAlwaysEncryptedProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlAlwaysEncryptedProperties deserializedSqlAlwaysEncryptedProperties = new SqlAlwaysEncryptedProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("alwaysEncryptedAkvAuthType".equals(fieldName)) { + deserializedSqlAlwaysEncryptedProperties.alwaysEncryptedAkvAuthType + = SqlAlwaysEncryptedAkvAuthType.fromString(reader.getString()); + } else if ("servicePrincipalId".equals(fieldName)) { + deserializedSqlAlwaysEncryptedProperties.servicePrincipalId = reader.readUntyped(); + } else if ("servicePrincipalKey".equals(fieldName)) { + deserializedSqlAlwaysEncryptedProperties.servicePrincipalKey = SecretBase.fromJson(reader); + } else if ("credential".equals(fieldName)) { + deserializedSqlAlwaysEncryptedProperties.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlAlwaysEncryptedProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java index f07d3d257cfe..c5d03b0a7225 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSink.java @@ -5,81 +5,70 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import 
com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL Data Warehouse sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlDWSink.class, visible = true) -@JsonTypeName("SqlDWSink") @Fluent public final class SqlDWSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlDWSink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "allowPolyBase") private Object allowPolyBase; /* * Specifies PolyBase-related settings when allowPolyBase is true. */ - @JsonProperty(value = "polyBaseSettings") private PolybaseSettings polyBaseSettings; /* * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "allowCopyCommand") private Object allowCopyCommand; /* * Specifies Copy Command related settings when allowCopyCommand is true. */ - @JsonProperty(value = "copyCommandSettings") private DWCopyCommandSettings copyCommandSettings; /* * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "sqlWriterUseTableLock") private Object sqlWriterUseTableLock; /* * Write behavior when copying data into azure SQL DW. 
Type: SqlDWWriteBehaviorEnum (or Expression with resultType * SqlDWWriteBehaviorEnum) */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /* * SQL DW upsert settings. */ - @JsonProperty(value = "upsertSettings") private SqlDWUpsertSettings upsertSettings; /** @@ -360,4 +349,96 @@ public void validate() { upsertSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("allowPolyBase", this.allowPolyBase); + jsonWriter.writeJsonField("polyBaseSettings", this.polyBaseSettings); + jsonWriter.writeUntypedField("allowCopyCommand", this.allowCopyCommand); + jsonWriter.writeJsonField("copyCommandSettings", this.copyCommandSettings); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("sqlWriterUseTableLock", this.sqlWriterUseTableLock); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + jsonWriter.writeJsonField("upsertSettings", this.upsertSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlDWSink from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlDWSink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the SqlDWSink. + */ + public static SqlDWSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlDWSink deserializedSqlDWSink = new SqlDWSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSqlDWSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSqlDWSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSqlDWSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSqlDWSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlDWSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlDWSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlDWSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSqlDWSink.preCopyScript = reader.readUntyped(); + } else if ("allowPolyBase".equals(fieldName)) { + deserializedSqlDWSink.allowPolyBase = reader.readUntyped(); + } else if ("polyBaseSettings".equals(fieldName)) { + deserializedSqlDWSink.polyBaseSettings = PolybaseSettings.fromJson(reader); + } else if ("allowCopyCommand".equals(fieldName)) { + deserializedSqlDWSink.allowCopyCommand = reader.readUntyped(); + } else if ("copyCommandSettings".equals(fieldName)) { + 
deserializedSqlDWSink.copyCommandSettings = DWCopyCommandSettings.fromJson(reader); + } else if ("tableOption".equals(fieldName)) { + deserializedSqlDWSink.tableOption = reader.readUntyped(); + } else if ("sqlWriterUseTableLock".equals(fieldName)) { + deserializedSqlDWSink.sqlWriterUseTableLock = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSqlDWSink.writeBehavior = reader.readUntyped(); + } else if ("upsertSettings".equals(fieldName)) { + deserializedSqlDWSink.upsertSettings = SqlDWUpsertSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlDWSink.withAdditionalProperties(additionalProperties); + + return deserializedSqlDWSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java index e6bacba34f68..249f2b25150a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWSource.java @@ -5,43 +5,38 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL Data Warehouse source. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlDWSource.class, visible = true) -@JsonTypeName("SqlDWSource") @Fluent public final class SqlDWSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlDWSource"; /* * SQL Data Warehouse reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as * SqlReaderQuery. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: * object (or Expression with resultType object), itemType: StoredProcedureParameter. */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -49,20 +44,17 @@ public final class SqlDWSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. 
*/ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -283,4 +275,87 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlDWSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlDWSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlDWSource. 
+ */ + public static SqlDWSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlDWSource deserializedSqlDWSource = new SqlDWSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSqlDWSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSqlDWSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlDWSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlDWSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSqlDWSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSqlDWSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlDWSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedSqlDWSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedSqlDWSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlDWSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedSqlDWSource.isolationLevel = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSqlDWSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSqlDWSource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if 
(additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlDWSource.withAdditionalProperties(additionalProperties); + + return deserializedSqlDWSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java index a8fcb543b207..439ac69824e4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlDWUpsertSettings.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sql DW upsert option settings. */ @Fluent -public final class SqlDWUpsertSettings { +public final class SqlDWUpsertSettings implements JsonSerializable { /* * Schema name for interim table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "interimSchemaName") private Object interimSchemaName; /* * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of * strings). 
*/ - @JsonProperty(value = "keys") private Object keys; /** @@ -82,4 +84,43 @@ public SqlDWUpsertSettings withKeys(Object keys) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("interimSchemaName", this.interimSchemaName); + jsonWriter.writeUntypedField("keys", this.keys); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlDWUpsertSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlDWUpsertSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlDWUpsertSettings. + */ + public static SqlDWUpsertSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlDWUpsertSettings deserializedSqlDWUpsertSettings = new SqlDWUpsertSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("interimSchemaName".equals(fieldName)) { + deserializedSqlDWUpsertSettings.interimSchemaName = reader.readUntyped(); + } else if ("keys".equals(fieldName)) { + deserializedSqlDWUpsertSettings.keys = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlDWUpsertSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java index 8c738b640968..e7c33816b5d8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISink.java @@ -5,78 +5,67 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure SQL Managed Instance sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlMISink.class, visible = true) -@JsonTypeName("SqlMISink") @Fluent public final class SqlMISink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlMISink"; /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterStoredProcedureName") private Object sqlWriterStoredProcedureName; /* * SQL writer table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterTableType") private Object sqlWriterTableType; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * SQL stored procedure parameters. */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "storedProcedureTableTypeParameterName") private Object storedProcedureTableTypeParameterName; /* * The option to handle sink table, such as autoCreate. 
For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "sqlWriterUseTableLock") private Object sqlWriterUseTableLock; /* * White behavior when copying data into azure SQL MI. Type: string (or Expression with resultType string) */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /* * SQL upsert settings. */ - @JsonProperty(value = "upsertSettings") private SqlUpsertSettings upsertSettings; /** @@ -351,4 +340,97 @@ public void validate() { upsertSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlWriterStoredProcedureName", this.sqlWriterStoredProcedureName); + jsonWriter.writeUntypedField("sqlWriterTableType", this.sqlWriterTableType); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("storedProcedureTableTypeParameterName", + this.storedProcedureTableTypeParameterName); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("sqlWriterUseTableLock", 
this.sqlWriterUseTableLock); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + jsonWriter.writeJsonField("upsertSettings", this.upsertSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlMISink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlMISink if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the SqlMISink. + */ + public static SqlMISink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlMISink deserializedSqlMISink = new SqlMISink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSqlMISink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSqlMISink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSqlMISink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSqlMISink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlMISink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlMISink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlMISink.type = reader.getString(); + } else if ("sqlWriterStoredProcedureName".equals(fieldName)) { + 
deserializedSqlMISink.sqlWriterStoredProcedureName = reader.readUntyped(); + } else if ("sqlWriterTableType".equals(fieldName)) { + deserializedSqlMISink.sqlWriterTableType = reader.readUntyped(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSqlMISink.preCopyScript = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlMISink.storedProcedureParameters = reader.readUntyped(); + } else if ("storedProcedureTableTypeParameterName".equals(fieldName)) { + deserializedSqlMISink.storedProcedureTableTypeParameterName = reader.readUntyped(); + } else if ("tableOption".equals(fieldName)) { + deserializedSqlMISink.tableOption = reader.readUntyped(); + } else if ("sqlWriterUseTableLock".equals(fieldName)) { + deserializedSqlMISink.sqlWriterUseTableLock = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSqlMISink.writeBehavior = reader.readUntyped(); + } else if ("upsertSettings".equals(fieldName)) { + deserializedSqlMISink.upsertSettings = SqlUpsertSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlMISink.withAdditionalProperties(additionalProperties); + + return deserializedSqlMISink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java index 418e4983cce0..177dc0bb048c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlMISource.java @@ -5,42 +5,37 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Azure SQL Managed Instance source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlMISource.class, visible = true) -@JsonTypeName("SqlMISource") @Fluent public final class SqlMISource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlMISource"; /* * SQL reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as * SqlReaderQuery. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -48,26 +43,22 @@ public final class SqlMISource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * Which additional types to produce. 
*/ - @JsonProperty(value = "produceAdditionalTypes") private Object produceAdditionalTypes; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. */ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -306,4 +297,90 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("produceAdditionalTypes", this.produceAdditionalTypes); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlMISource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlMISource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlMISource. + */ + public static SqlMISource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlMISource deserializedSqlMISource = new SqlMISource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSqlMISource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSqlMISource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlMISource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlMISource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSqlMISource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSqlMISource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlMISource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedSqlMISource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedSqlMISource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + 
deserializedSqlMISource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedSqlMISource.isolationLevel = reader.readUntyped(); + } else if ("produceAdditionalTypes".equals(fieldName)) { + deserializedSqlMISource.produceAdditionalTypes = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSqlMISource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSqlMISource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlMISource.withAdditionalProperties(additionalProperties); + + return deserializedSqlMISource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java index 5047336c5a31..2f011b419025 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlPartitionSettings.java @@ -5,19 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for Sql source partitioning. 
*/ @Fluent -public final class SqlPartitionSettings { +public final class SqlPartitionSettings implements JsonSerializable { /* * The name of the column in integer or datetime type that will be used for proceeding partitioning. If not * specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* @@ -25,7 +28,6 @@ public final class SqlPartitionSettings { * partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned * and copied. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* @@ -33,7 +35,6 @@ public final class SqlPartitionSettings { * partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned * and copied. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /** @@ -121,4 +122,46 @@ public SqlPartitionSettings withPartitionLowerBound(Object partitionLowerBound) */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlPartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlPartitionSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SqlPartitionSettings. + */ + public static SqlPartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlPartitionSettings deserializedSqlPartitionSettings = new SqlPartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionColumnName".equals(fieldName)) { + deserializedSqlPartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedSqlPartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedSqlPartitionSettings.partitionLowerBound = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlPartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerAuthenticationType.java index 97b09d8fe703..f4f1e7256587 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -43,7 +42,6 @@ public SqlServerAuthenticationType() { * @param name a name to look for. * @return the corresponding SqlServerAuthenticationType. 
*/ - @JsonCreator public static SqlServerAuthenticationType fromString(String name) { return fromString(name, SqlServerAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerBaseLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerBaseLinkedServiceTypeProperties.java index 2c5fb3745ad4..d5d930fe7883 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerBaseLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerBaseLinkedServiceTypeProperties.java @@ -5,24 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sql Server family connector common linked service properties. */ @Fluent -public class SqlServerBaseLinkedServiceTypeProperties { +public class SqlServerBaseLinkedServiceTypeProperties + implements JsonSerializable { /* * The name or network address of the instance of SQL Server to which to connect, used by recommended version. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "server") private Object server; /* * The name of the database, used by recommended version. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "database") private Object database; /* @@ -30,14 +33,12 @@ public class SqlServerBaseLinkedServiceTypeProperties { * version. Possible values are true/yes/mandatory, false/no/optional and strict. 
Type: string (or Expression with * resultType string). */ - @JsonProperty(value = "encrypt") private Object encrypt; /* * Indicate whether the channel will be encrypted while bypassing walking the certificate chain to validate trust, * used by recommended version. Type: Boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "trustServerCertificate") private Object trustServerCertificate; /* @@ -45,21 +46,18 @@ public class SqlServerBaseLinkedServiceTypeProperties { * name from the Data Source is used for certificate validation, used by recommended version. Type: string (or * Expression with resultType string). */ - @JsonProperty(value = "hostNameInCertificate") private Object hostnameInCertificate; /* * The application workload type when connecting to a server, used by recommended version. Possible values are * ReadOnly and ReadWrite. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "applicationIntent") private Object applicationIntent; /* * The length of time (in seconds) to wait for a connection to the server before terminating the attempt and * generating an error, used by recommended version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "connectTimeout") private Object connectTimeout; /* @@ -67,7 +65,6 @@ public class SqlServerBaseLinkedServiceTypeProperties { * recommended version. This must be an integer between 0 and 255. Type: integer (or Expression with resultType * integer). */ - @JsonProperty(value = "connectRetryCount") private Object connectRetryCount; /* @@ -75,21 +72,18 @@ public class SqlServerBaseLinkedServiceTypeProperties { * connection failure, used by recommended version. This must be an integer between 1 and 60. Type: integer (or * Expression with resultType integer). 
*/ - @JsonProperty(value = "connectRetryInterval") private Object connectRetryInterval; /* * The minimum time, in seconds, for the connection to live in the connection pool before being destroyed, used by * recommended version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "loadBalanceTimeout") private Object loadBalanceTimeout; /* * The default wait time (in seconds) before terminating the attempt to execute a command and generating an error, * used by recommended version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "commandTimeout") private Object commandTimeout; /* @@ -97,28 +91,24 @@ public class SqlServerBaseLinkedServiceTypeProperties { * account credentials are used for authentication (when true), used by recommended version. Type: Boolean (or * Expression with resultType boolean). */ - @JsonProperty(value = "integratedSecurity") private Object integratedSecurity; /* * The name or address of the partner server to connect to if the primary server is down, used by recommended * version. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "failoverPartner") private Object failoverPartner; /* * The maximum number of connections allowed in the connection pool for this specific connection string, used by * recommended version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "maxPoolSize") private Object maxPoolSize; /* * The minimum number of connections allowed in the connection pool for this specific connection string, used by * recommended version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "minPoolSize") private Object minPoolSize; /* @@ -126,7 +116,6 @@ public class SqlServerBaseLinkedServiceTypeProperties { * process or cancel all result sets from one batch before it can execute any other batch on that connection, used * by recommended version. 
Type: Boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "multipleActiveResultSets") private Object multipleActiveResultSets; /* @@ -134,21 +123,18 @@ public class SqlServerBaseLinkedServiceTypeProperties { * MultiSubnetFailover=true provides faster detection of and connection to the (currently) active server, used by * recommended version. Type: Boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "multiSubnetFailover") private Object multiSubnetFailover; /* * The size in bytes of the network packets used to communicate with an instance of server, used by recommended * version. Type: integer (or Expression with resultType integer). */ - @JsonProperty(value = "packetSize") private Object packetSize; /* * Indicate whether the connection will be pooled or explicitly opened every time that the connection is requested, * used by recommended version. Type: Boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "pooling") private Object pooling; /** @@ -606,4 +592,96 @@ public SqlServerBaseLinkedServiceTypeProperties withPooling(Object pooling) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("server", this.server); + jsonWriter.writeUntypedField("database", this.database); + jsonWriter.writeUntypedField("encrypt", this.encrypt); + jsonWriter.writeUntypedField("trustServerCertificate", this.trustServerCertificate); + jsonWriter.writeUntypedField("hostNameInCertificate", this.hostnameInCertificate); + jsonWriter.writeUntypedField("applicationIntent", this.applicationIntent); + jsonWriter.writeUntypedField("connectTimeout", this.connectTimeout); + jsonWriter.writeUntypedField("connectRetryCount", this.connectRetryCount); + jsonWriter.writeUntypedField("connectRetryInterval", this.connectRetryInterval); + 
jsonWriter.writeUntypedField("loadBalanceTimeout", this.loadBalanceTimeout); + jsonWriter.writeUntypedField("commandTimeout", this.commandTimeout); + jsonWriter.writeUntypedField("integratedSecurity", this.integratedSecurity); + jsonWriter.writeUntypedField("failoverPartner", this.failoverPartner); + jsonWriter.writeUntypedField("maxPoolSize", this.maxPoolSize); + jsonWriter.writeUntypedField("minPoolSize", this.minPoolSize); + jsonWriter.writeUntypedField("multipleActiveResultSets", this.multipleActiveResultSets); + jsonWriter.writeUntypedField("multiSubnetFailover", this.multiSubnetFailover); + jsonWriter.writeUntypedField("packetSize", this.packetSize); + jsonWriter.writeUntypedField("pooling", this.pooling); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerBaseLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerBaseLinkedServiceTypeProperties if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlServerBaseLinkedServiceTypeProperties. 
+ */ + public static SqlServerBaseLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerBaseLinkedServiceTypeProperties deserializedSqlServerBaseLinkedServiceTypeProperties + = new SqlServerBaseLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("server".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.server = reader.readUntyped(); + } else if ("database".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.database = reader.readUntyped(); + } else if ("encrypt".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.encrypt = reader.readUntyped(); + } else if ("trustServerCertificate".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.trustServerCertificate = reader.readUntyped(); + } else if ("hostNameInCertificate".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.hostnameInCertificate = reader.readUntyped(); + } else if ("applicationIntent".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.applicationIntent = reader.readUntyped(); + } else if ("connectTimeout".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.connectTimeout = reader.readUntyped(); + } else if ("connectRetryCount".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.connectRetryCount = reader.readUntyped(); + } else if ("connectRetryInterval".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.connectRetryInterval = reader.readUntyped(); + } else if ("loadBalanceTimeout".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.loadBalanceTimeout = reader.readUntyped(); + } else if ("commandTimeout".equals(fieldName)) { + 
deserializedSqlServerBaseLinkedServiceTypeProperties.commandTimeout = reader.readUntyped(); + } else if ("integratedSecurity".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.integratedSecurity = reader.readUntyped(); + } else if ("failoverPartner".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.failoverPartner = reader.readUntyped(); + } else if ("maxPoolSize".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.maxPoolSize = reader.readUntyped(); + } else if ("minPoolSize".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.minPoolSize = reader.readUntyped(); + } else if ("multipleActiveResultSets".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.multipleActiveResultSets + = reader.readUntyped(); + } else if ("multiSubnetFailover".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.multiSubnetFailover = reader.readUntyped(); + } else if ("packetSize".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.packetSize = reader.readUntyped(); + } else if ("pooling".equals(fieldName)) { + deserializedSqlServerBaseLinkedServiceTypeProperties.pooling = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlServerBaseLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java index f971132567ba..99f20db4bf44 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerLinkedService.java @@ 
-6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * SQL Server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerLinkedService.class, visible = true) -@JsonTypeName("SqlServer") @Fluent public final class SqlServerLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlServer"; /* * SQL Server linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SqlServerLinkedServiceTypeProperties innerTypeProperties = new SqlServerLinkedServiceTypeProperties(); /** @@ -779,4 +775,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SqlServerLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SqlServerLinkedService. 
+ */ + public static SqlServerLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerLinkedService deserializedSqlServerLinkedService = new SqlServerLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSqlServerLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSqlServerLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSqlServerLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSqlServerLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSqlServerLinkedService.innerTypeProperties + = SqlServerLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSqlServerLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlServerLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSqlServerLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java index 27593a678a14..f9adf973ed62 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSink.java @@ -5,78 +5,67 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL server sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerSink.class, visible = true) -@JsonTypeName("SqlServerSink") @Fluent public final class SqlServerSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlServerSink"; /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterStoredProcedureName") private Object sqlWriterStoredProcedureName; /* * SQL writer table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterTableType") private Object sqlWriterTableType; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * SQL stored procedure parameters. */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "storedProcedureTableTypeParameterName") private Object storedProcedureTableTypeParameterName; /* * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "sqlWriterUseTableLock") private Object sqlWriterUseTableLock; /* * Write behavior when copying data into sql server. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /* * SQL upsert settings. */ - @JsonProperty(value = "upsertSettings") private SqlUpsertSettings upsertSettings; /** @@ -351,4 +340,97 @@ public void validate() { upsertSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlWriterStoredProcedureName", this.sqlWriterStoredProcedureName); + jsonWriter.writeUntypedField("sqlWriterTableType", this.sqlWriterTableType); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("storedProcedureTableTypeParameterName", + 
this.storedProcedureTableTypeParameterName); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("sqlWriterUseTableLock", this.sqlWriterUseTableLock); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + jsonWriter.writeJsonField("upsertSettings", this.upsertSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlServerSink. + */ + public static SqlServerSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerSink deserializedSqlServerSink = new SqlServerSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSqlServerSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSqlServerSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSqlServerSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSqlServerSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlServerSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + 
deserializedSqlServerSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlServerSink.type = reader.getString(); + } else if ("sqlWriterStoredProcedureName".equals(fieldName)) { + deserializedSqlServerSink.sqlWriterStoredProcedureName = reader.readUntyped(); + } else if ("sqlWriterTableType".equals(fieldName)) { + deserializedSqlServerSink.sqlWriterTableType = reader.readUntyped(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSqlServerSink.preCopyScript = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlServerSink.storedProcedureParameters = reader.readUntyped(); + } else if ("storedProcedureTableTypeParameterName".equals(fieldName)) { + deserializedSqlServerSink.storedProcedureTableTypeParameterName = reader.readUntyped(); + } else if ("tableOption".equals(fieldName)) { + deserializedSqlServerSink.tableOption = reader.readUntyped(); + } else if ("sqlWriterUseTableLock".equals(fieldName)) { + deserializedSqlServerSink.sqlWriterUseTableLock = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSqlServerSink.writeBehavior = reader.readUntyped(); + } else if ("upsertSettings".equals(fieldName)) { + deserializedSqlServerSink.upsertSettings = SqlUpsertSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlServerSink.withAdditionalProperties(additionalProperties); + + return deserializedSqlServerSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java index fab02497d518..ca2190064095 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerSource.class, visible = true) -@JsonTypeName("SqlServerSource") @Fluent public final class SqlServerSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlServerSource"; /* * SQL reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -48,26 +43,22 @@ public final class SqlServerSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. 
The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * Which additional types to produce. */ - @JsonProperty(value = "produceAdditionalTypes") private Object produceAdditionalTypes; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. */ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -304,4 +295,90 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("produceAdditionalTypes", this.produceAdditionalTypes); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + 
jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlServerSource. + */ + public static SqlServerSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerSource deserializedSqlServerSource = new SqlServerSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSqlServerSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSqlServerSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlServerSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlServerSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSqlServerSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSqlServerSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlServerSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + 
deserializedSqlServerSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedSqlServerSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlServerSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedSqlServerSource.isolationLevel = reader.readUntyped(); + } else if ("produceAdditionalTypes".equals(fieldName)) { + deserializedSqlServerSource.produceAdditionalTypes = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSqlServerSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSqlServerSource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlServerSource.withAdditionalProperties(additionalProperties); + + return deserializedSqlServerSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java index 0f28fe4cbb01..33fb0e966ad2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerStoredProcedureActivity.java @@ -6,35 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerStoredProcedureActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * SQL stored procedure activity type. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SqlServerStoredProcedureActivity.class, - visible = true) -@JsonTypeName("SqlServerStoredProcedure") @Fluent public final class SqlServerStoredProcedureActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlServerStoredProcedure"; /* * SQL stored procedure activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private SqlServerStoredProcedureActivityTypeProperties innerTypeProperties = new SqlServerStoredProcedureActivityTypeProperties(); @@ -201,4 +194,88 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SqlServerStoredProcedureActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerStoredProcedureActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerStoredProcedureActivity if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SqlServerStoredProcedureActivity. 
+ */ + public static SqlServerStoredProcedureActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerStoredProcedureActivity deserializedSqlServerStoredProcedureActivity + = new SqlServerStoredProcedureActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity + .withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedSqlServerStoredProcedureActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedSqlServerStoredProcedureActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSqlServerStoredProcedureActivity.innerTypeProperties + = SqlServerStoredProcedureActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + 
deserializedSqlServerStoredProcedureActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlServerStoredProcedureActivity.withAdditionalProperties(additionalProperties); + + return deserializedSqlServerStoredProcedureActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java index 393faeaf8b87..c06fd03f6ac3 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlServerTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SqlServerTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The on-premises SQL Server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlServerTableDataset.class, visible = true) -@JsonTypeName("SqlServerTable") @Fluent public final class SqlServerTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlServerTable"; /* * On-premises SQL Server dataset properties. */ - @JsonProperty(value = "typeProperties") private SqlServerTableDatasetTypeProperties innerTypeProperties; /** @@ -208,4 +204,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlServerTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlServerTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SqlServerTableDataset. 
+ */ + public static SqlServerTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlServerTableDataset deserializedSqlServerTableDataset = new SqlServerTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSqlServerTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSqlServerTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSqlServerTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSqlServerTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSqlServerTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSqlServerTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSqlServerTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSqlServerTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSqlServerTableDataset.innerTypeProperties + = SqlServerTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlServerTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedSqlServerTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java index 0357183fa7b7..9b6d72a6a31f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSink.java @@ -5,78 +5,67 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlSink.class, visible = true) -@JsonTypeName("SqlSink") @Fluent public final class SqlSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlSink"; /* * SQL writer stored procedure name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterStoredProcedureName") private Object sqlWriterStoredProcedureName; /* * SQL writer table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlWriterTableType") private Object sqlWriterTableType; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * SQL stored procedure parameters. 
*/ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "storedProcedureTableTypeParameterName") private Object storedProcedureTableTypeParameterName; /* * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "sqlWriterUseTableLock") private Object sqlWriterUseTableLock; /* * Write behavior when copying data into sql. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /* * SQL upsert settings. */ - @JsonProperty(value = "upsertSettings") private SqlUpsertSettings upsertSettings; /** @@ -351,4 +340,97 @@ public void validate() { upsertSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlWriterStoredProcedureName", this.sqlWriterStoredProcedureName); + jsonWriter.writeUntypedField("sqlWriterTableType", this.sqlWriterTableType); + 
jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("storedProcedureTableTypeParameterName", + this.storedProcedureTableTypeParameterName); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("sqlWriterUseTableLock", this.sqlWriterUseTableLock); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + jsonWriter.writeJsonField("upsertSettings", this.upsertSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlSink if the JsonReader was pointing to an instance of it, or null if it was pointing to + * JSON null. + * @throws IOException If an error occurs while reading the SqlSink. 
+ */ + public static SqlSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlSink deserializedSqlSink = new SqlSink(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedSqlSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedSqlSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedSqlSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedSqlSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlSink.type = reader.getString(); + } else if ("sqlWriterStoredProcedureName".equals(fieldName)) { + deserializedSqlSink.sqlWriterStoredProcedureName = reader.readUntyped(); + } else if ("sqlWriterTableType".equals(fieldName)) { + deserializedSqlSink.sqlWriterTableType = reader.readUntyped(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedSqlSink.preCopyScript = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlSink.storedProcedureParameters = reader.readUntyped(); + } else if ("storedProcedureTableTypeParameterName".equals(fieldName)) { + deserializedSqlSink.storedProcedureTableTypeParameterName = reader.readUntyped(); + } else if ("tableOption".equals(fieldName)) { + deserializedSqlSink.tableOption = reader.readUntyped(); + } else if ("sqlWriterUseTableLock".equals(fieldName)) { + 
deserializedSqlSink.sqlWriterUseTableLock = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedSqlSink.writeBehavior = reader.readUntyped(); + } else if ("upsertSettings".equals(fieldName)) { + deserializedSqlSink.upsertSettings = SqlUpsertSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlSink.withAdditionalProperties(additionalProperties); + + return deserializedSqlSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java index 027219d25f6b..00fee4021d0e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity SQL source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SqlSource.class, visible = true) -@JsonTypeName("SqlSource") @Fluent public final class SqlSource extends TabularSource { /* * Copy source type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SqlSource"; /* * SQL reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -48,20 +43,17 @@ public final class SqlSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. 
*/ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -278,4 +270,87 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the SqlSource. 
+ */ + public static SqlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlSource deserializedSqlSource = new SqlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSqlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSqlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSqlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSqlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSqlSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSqlSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSqlSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedSqlSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedSqlSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedSqlSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedSqlSource.isolationLevel = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedSqlSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedSqlSource.partitionSettings = SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + 
additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSqlSource.withAdditionalProperties(additionalProperties); + + return deserializedSqlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java index 8ed9b4137058..2835f4f0aa58 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SqlUpsertSettings.java @@ -5,30 +5,31 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Sql upsert option settings. */ @Fluent -public final class SqlUpsertSettings { +public final class SqlUpsertSettings implements JsonSerializable { /* * Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "useTempDB") private Object useTempDB; /* * Schema name for interim table. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "interimSchemaName") private Object interimSchemaName; /* * Key column names for unique row identification. Type: array of strings (or Expression with resultType array of * strings). 
*/ - @JsonProperty(value = "keys") private Object keys; /** @@ -110,4 +111,46 @@ public SqlUpsertSettings withKeys(Object keys) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("useTempDB", this.useTempDB); + jsonWriter.writeUntypedField("interimSchemaName", this.interimSchemaName); + jsonWriter.writeUntypedField("keys", this.keys); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SqlUpsertSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SqlUpsertSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SqlUpsertSettings. + */ + public static SqlUpsertSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SqlUpsertSettings deserializedSqlUpsertSettings = new SqlUpsertSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("useTempDB".equals(fieldName)) { + deserializedSqlUpsertSettings.useTempDB = reader.readUntyped(); + } else if ("interimSchemaName".equals(fieldName)) { + deserializedSqlUpsertSettings.interimSchemaName = reader.readUntyped(); + } else if ("keys".equals(fieldName)) { + deserializedSqlUpsertSettings.keys = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSqlUpsertSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java index b34f1af7d300..177f46cd5168 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SquareLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Square Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareLinkedService.class, visible = true) -@JsonTypeName("Square") @Fluent public final class SquareLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Square"; /* * Square Service linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SquareLinkedServiceTypeProperties innerTypeProperties = new SquareLinkedServiceTypeProperties(); /** @@ -332,4 +328,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SquareLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SquareLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SquareLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SquareLinkedService. 
+ */ + public static SquareLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SquareLinkedService deserializedSquareLinkedService = new SquareLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSquareLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSquareLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSquareLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSquareLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSquareLinkedService.innerTypeProperties + = SquareLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSquareLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSquareLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSquareLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java index 46d8e9b0711b..7180cedf1c9c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Square Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareObjectDataset.class, visible = true) -@JsonTypeName("SquareObject") @Fluent public final class SquareObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SquareObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SquareObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SquareObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SquareObjectDataset. 
+ */ + public static SquareObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SquareObjectDataset deserializedSquareObjectDataset = new SquareObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSquareObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSquareObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSquareObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSquareObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSquareObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSquareObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSquareObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSquareObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSquareObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSquareObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedSquareObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java index 4662f5f3f61b..a73ba05dc57e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SquareSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Square Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SquareSource.class, visible = true) -@JsonTypeName("SquareSource") @Fluent public final class SquareSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SquareSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public SquareSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SquareSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SquareSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SquareSource. 
+ */ + public static SquareSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SquareSource deserializedSquareSource = new SquareSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSquareSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSquareSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSquareSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSquareSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSquareSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSquareSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSquareSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSquareSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSquareSource.withAdditionalProperties(additionalProperties); + + return deserializedSquareSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java index b8b1a1f6b14a..e969c01b72bd 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisAccessCredential.java @@ -6,29 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS access credential. */ @Fluent -public final class SsisAccessCredential { +public final class SsisAccessCredential implements JsonSerializable { /* * Domain for windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "domain", required = true) private Object domain; /* * UseName for windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName", required = true) private Object username; /* * Password for windows authentication. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /** @@ -122,4 +123,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisAccessCredential.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisAccessCredential from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of SsisAccessCredential if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisAccessCredential. + */ + public static SsisAccessCredential fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisAccessCredential deserializedSsisAccessCredential = new SsisAccessCredential(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("domain".equals(fieldName)) { + deserializedSsisAccessCredential.domain = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSsisAccessCredential.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSsisAccessCredential.password = SecretBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisAccessCredential; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java index 6b6ccecf8a44..379c75b6eb3f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisChildPackage.java @@ -6,35 +6,35 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS embedded child package. */ @Fluent -public final class SsisChildPackage { +public final class SsisChildPackage implements JsonSerializable { /* * Path for embedded child package. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "packagePath", required = true) private Object packagePath; /* * Name for embedded child package. */ - @JsonProperty(value = "packageName") private String packageName; /* * Content for embedded child package. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "packageContent", required = true) private Object packageContent; /* * Last modified date for embedded child package. */ - @JsonProperty(value = "packageLastModifiedDate") private String packageLastModifiedDate; /** @@ -145,4 +145,50 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisChildPackage.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("packagePath", this.packagePath); + jsonWriter.writeUntypedField("packageContent", this.packageContent); + jsonWriter.writeStringField("packageName", this.packageName); + jsonWriter.writeStringField("packageLastModifiedDate", this.packageLastModifiedDate); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisChildPackage from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisChildPackage if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisChildPackage. 
+ */ + public static SsisChildPackage fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisChildPackage deserializedSsisChildPackage = new SsisChildPackage(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("packagePath".equals(fieldName)) { + deserializedSsisChildPackage.packagePath = reader.readUntyped(); + } else if ("packageContent".equals(fieldName)) { + deserializedSsisChildPackage.packageContent = reader.readUntyped(); + } else if ("packageName".equals(fieldName)) { + deserializedSsisChildPackage.packageName = reader.getString(); + } else if ("packageLastModifiedDate".equals(fieldName)) { + deserializedSsisChildPackage.packageLastModifiedDate = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisChildPackage; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java index 95ab4daeba05..6e5efbce0abc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironment.java @@ -5,36 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Ssis 
environment. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisEnvironment.class, visible = true) -@JsonTypeName("Environment") @Fluent public final class SsisEnvironment extends SsisObjectMetadata { /* * Type of metadata. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private SsisObjectMetadataType type = SsisObjectMetadataType.ENVIRONMENT; /* * Folder id which contains environment. */ - @JsonProperty(value = "folderId") private Long folderId; /* * Variable in environment */ - @JsonProperty(value = "variables") private List variables; /** @@ -132,4 +126,56 @@ public void validate() { variables().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", id()); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeNumberField("folderId", this.folderId); + jsonWriter.writeArrayField("variables", this.variables, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisEnvironment from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisEnvironment if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisEnvironment. 
+ */ + public static SsisEnvironment fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisEnvironment deserializedSsisEnvironment = new SsisEnvironment(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisEnvironment.withId(reader.getNullable(JsonReader::getLong)); + } else if ("name".equals(fieldName)) { + deserializedSsisEnvironment.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSsisEnvironment.withDescription(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedSsisEnvironment.type = SsisObjectMetadataType.fromString(reader.getString()); + } else if ("folderId".equals(fieldName)) { + deserializedSsisEnvironment.folderId = reader.getNullable(JsonReader::getLong); + } else if ("variables".equals(fieldName)) { + List variables = reader.readArray(reader1 -> SsisVariable.fromJson(reader1)); + deserializedSsisEnvironment.variables = variables; + } else { + reader.skipChildren(); + } + } + + return deserializedSsisEnvironment; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironmentReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironmentReference.java index be03496f8cff..37f3782a2863 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironmentReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisEnvironmentReference.java @@ -5,35 +5,35 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Ssis environment reference. */ @Fluent -public final class SsisEnvironmentReference { +public final class SsisEnvironmentReference implements JsonSerializable { /* * Environment reference id. */ - @JsonProperty(value = "id") private Long id; /* * Environment folder name. */ - @JsonProperty(value = "environmentFolderName") private String environmentFolderName; /* * Environment name. */ - @JsonProperty(value = "environmentName") private String environmentName; /* * Reference type */ - @JsonProperty(value = "referenceType") private String referenceType; /** @@ -129,4 +129,49 @@ public SsisEnvironmentReference withReferenceType(String referenceType) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", this.id); + jsonWriter.writeStringField("environmentFolderName", this.environmentFolderName); + jsonWriter.writeStringField("environmentName", this.environmentName); + jsonWriter.writeStringField("referenceType", this.referenceType); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisEnvironmentReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisEnvironmentReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisEnvironmentReference. 
+ */ + public static SsisEnvironmentReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisEnvironmentReference deserializedSsisEnvironmentReference = new SsisEnvironmentReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisEnvironmentReference.id = reader.getNullable(JsonReader::getLong); + } else if ("environmentFolderName".equals(fieldName)) { + deserializedSsisEnvironmentReference.environmentFolderName = reader.getString(); + } else if ("environmentName".equals(fieldName)) { + deserializedSsisEnvironmentReference.environmentName = reader.getString(); + } else if ("referenceType".equals(fieldName)) { + deserializedSsisEnvironmentReference.referenceType = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisEnvironmentReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java index 90ff35911f6b..35aa8f058e33 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionCredential.java @@ -6,29 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS package execution credential. 
*/ @Fluent -public final class SsisExecutionCredential { +public final class SsisExecutionCredential implements JsonSerializable { /* * Domain for windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "domain", required = true) private Object domain; /* * UseName for windows authentication. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "userName", required = true) private Object username; /* * Password for windows authentication. */ - @JsonProperty(value = "password", required = true) private SecureString password; /** @@ -124,4 +125,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisExecutionCredential.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("domain", this.domain); + jsonWriter.writeUntypedField("userName", this.username); + jsonWriter.writeJsonField("password", this.password); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisExecutionCredential from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisExecutionCredential if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisExecutionCredential. 
+ */ + public static SsisExecutionCredential fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisExecutionCredential deserializedSsisExecutionCredential = new SsisExecutionCredential(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("domain".equals(fieldName)) { + deserializedSsisExecutionCredential.domain = reader.readUntyped(); + } else if ("userName".equals(fieldName)) { + deserializedSsisExecutionCredential.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedSsisExecutionCredential.password = SecureString.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisExecutionCredential; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java index 39f799c5fadf..bfdf44ef5efa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisExecutionParameter.java @@ -6,17 +6,20 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS execution parameter. */ @Fluent -public final class SsisExecutionParameter { +public final class SsisExecutionParameter implements JsonSerializable { /* * SSIS package execution parameter value. 
Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value", required = true) private Object value; /** @@ -60,4 +63,41 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisExecutionParameter.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisExecutionParameter from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisExecutionParameter if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisExecutionParameter. + */ + public static SsisExecutionParameter fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisExecutionParameter deserializedSsisExecutionParameter = new SsisExecutionParameter(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedSsisExecutionParameter.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisExecutionParameter; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java index 931d6cf457a2..6aa9f2838b6a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisFolder.java @@ -5,23 +5,19 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Ssis folder. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisFolder.class, visible = true) -@JsonTypeName("Folder") @Fluent public final class SsisFolder extends SsisObjectMetadata { /* * Type of metadata. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private SsisObjectMetadataType type = SsisObjectMetadataType.FOLDER; /** @@ -76,4 +72,49 @@ public SsisFolder withDescription(String description) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", id()); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisFolder from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisFolder if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the SsisFolder. 
+ */ + public static SsisFolder fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisFolder deserializedSsisFolder = new SsisFolder(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisFolder.withId(reader.getNullable(JsonReader::getLong)); + } else if ("name".equals(fieldName)) { + deserializedSsisFolder.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSsisFolder.withDescription(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedSsisFolder.type = SsisObjectMetadataType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisFolder; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java index 6db88c9d1274..89c2b7bfe9f1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocation.java @@ -6,30 +6,31 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SsisLogLocationTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; /** * SSIS package execution log location. 
*/ @Fluent -public final class SsisLogLocation { +public final class SsisLogLocation implements JsonSerializable { /* * The SSIS package execution log path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "logPath", required = true) private Object logPath; /* * The type of SSIS log location. */ - @JsonProperty(value = "type", required = true) private SsisLogLocationType type; /* * SSIS package execution log location properties. */ - @JsonProperty(value = "typeProperties", required = true) private SsisLogLocationTypeProperties innerTypeProperties = new SsisLogLocationTypeProperties(); /** @@ -163,4 +164,47 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisLogLocation.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("logPath", this.logPath); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisLogLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisLogLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisLogLocation. 
+ */ + public static SsisLogLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisLogLocation deserializedSsisLogLocation = new SsisLogLocation(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("logPath".equals(fieldName)) { + deserializedSsisLogLocation.logPath = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedSsisLogLocation.type = SsisLogLocationType.fromString(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedSsisLogLocation.innerTypeProperties = SsisLogLocationTypeProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisLogLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocationType.java index 21a0c62b5d31..a0612248602d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisLogLocationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public SsisLogLocationType() { * @param name a name to look for. * @return the corresponding SsisLogLocationType. 
*/ - @JsonCreator public static SsisLogLocationType fromString(String name) { return fromString(name, SsisLogLocationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java index 3db60b9c6916..a49180ef63b9 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadata.java @@ -5,47 +5,35 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS object metadata. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisObjectMetadata.class, visible = true) -@JsonTypeName("SsisObjectMetadata") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Folder", value = SsisFolder.class), - @JsonSubTypes.Type(name = "Project", value = SsisProject.class), - @JsonSubTypes.Type(name = "Package", value = SsisPackage.class), - @JsonSubTypes.Type(name = "Environment", value = SsisEnvironment.class) }) @Fluent -public class SsisObjectMetadata { +public class SsisObjectMetadata implements JsonSerializable { /* * Type of metadata. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private SsisObjectMetadataType type = SsisObjectMetadataType.fromString("SsisObjectMetadata"); /* * Metadata id. */ - @JsonProperty(value = "id") private Long id; /* * Metadata name. */ - @JsonProperty(value = "name") private String name; /* * Metadata description. */ - @JsonProperty(value = "description") private String description; /** @@ -130,4 +118,80 @@ public SsisObjectMetadata withDescription(String description) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeNumberField("id", this.id); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("description", this.description); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisObjectMetadata from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisObjectMetadata if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisObjectMetadata. + */ + public static SsisObjectMetadata fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("Folder".equals(discriminatorValue)) { + return SsisFolder.fromJson(readerToUse.reset()); + } else if ("Project".equals(discriminatorValue)) { + return SsisProject.fromJson(readerToUse.reset()); + } else if ("Package".equals(discriminatorValue)) { + return SsisPackage.fromJson(readerToUse.reset()); + } else if ("Environment".equals(discriminatorValue)) { + return SsisEnvironment.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static SsisObjectMetadata fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisObjectMetadata deserializedSsisObjectMetadata = new SsisObjectMetadata(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSsisObjectMetadata.type = SsisObjectMetadataType.fromString(reader.getString()); + } else if ("id".equals(fieldName)) { + deserializedSsisObjectMetadata.id = reader.getNullable(JsonReader::getLong); + } else if ("name".equals(fieldName)) { + deserializedSsisObjectMetadata.name = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedSsisObjectMetadata.description = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisObjectMetadata; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadataType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadataType.java index 224a6b5158c5..621718a14f00 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadataType.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisObjectMetadataType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public SsisObjectMetadataType() { * @param name a name to look for. * @return the corresponding SsisObjectMetadataType. */ - @JsonCreator public static SsisObjectMetadataType fromString(String name) { return fromString(name, SsisObjectMetadataType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java index 3f6a65a4ea6d..16e9f1002361 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackage.java @@ -5,48 +5,40 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Ssis Package. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisPackage.class, visible = true) -@JsonTypeName("Package") @Fluent public final class SsisPackage extends SsisObjectMetadata { /* * Type of metadata. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private SsisObjectMetadataType type = SsisObjectMetadataType.PACKAGE; /* * Folder id which contains package. */ - @JsonProperty(value = "folderId") private Long folderId; /* * Project version which contains package. */ - @JsonProperty(value = "projectVersion") private Long projectVersion; /* * Project id which contains package. */ - @JsonProperty(value = "projectId") private Long projectId; /* * Parameters in package */ - @JsonProperty(value = "parameters") private List parameters; /** @@ -184,4 +176,62 @@ public void validate() { parameters().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", id()); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeNumberField("folderId", this.folderId); + jsonWriter.writeNumberField("projectVersion", this.projectVersion); + jsonWriter.writeNumberField("projectId", this.projectId); + jsonWriter.writeArrayField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisPackage from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisPackage if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisPackage. 
+ */ + public static SsisPackage fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisPackage deserializedSsisPackage = new SsisPackage(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisPackage.withId(reader.getNullable(JsonReader::getLong)); + } else if ("name".equals(fieldName)) { + deserializedSsisPackage.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSsisPackage.withDescription(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedSsisPackage.type = SsisObjectMetadataType.fromString(reader.getString()); + } else if ("folderId".equals(fieldName)) { + deserializedSsisPackage.folderId = reader.getNullable(JsonReader::getLong); + } else if ("projectVersion".equals(fieldName)) { + deserializedSsisPackage.projectVersion = reader.getNullable(JsonReader::getLong); + } else if ("projectId".equals(fieldName)) { + deserializedSsisPackage.projectId = reader.getNullable(JsonReader::getLong); + } else if ("parameters".equals(fieldName)) { + List parameters = reader.readArray(reader1 -> SsisParameter.fromJson(reader1)); + deserializedSsisPackage.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedSsisPackage; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocation.java index 0678727430c0..09f675e79b2a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocation.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocation.java @@ -5,31 +5,32 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SsisPackageLocationTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * SSIS package location. */ @Fluent -public final class SsisPackageLocation { +public final class SsisPackageLocation implements JsonSerializable { /* * The SSIS package path. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "packagePath") private Object packagePath; /* * The type of SSIS package location. */ - @JsonProperty(value = "type") private SsisPackageLocationType type; /* * SSIS package location properties. */ - @JsonProperty(value = "typeProperties") private SsisPackageLocationTypeProperties innerTypeProperties; /** @@ -285,4 +286,47 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("packagePath", this.packagePath); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisPackageLocation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisPackageLocation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the SsisPackageLocation. + */ + public static SsisPackageLocation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisPackageLocation deserializedSsisPackageLocation = new SsisPackageLocation(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("packagePath".equals(fieldName)) { + deserializedSsisPackageLocation.packagePath = reader.readUntyped(); + } else if ("type".equals(fieldName)) { + deserializedSsisPackageLocation.type = SsisPackageLocationType.fromString(reader.getString()); + } else if ("typeProperties".equals(fieldName)) { + deserializedSsisPackageLocation.innerTypeProperties + = SsisPackageLocationTypeProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisPackageLocation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocationType.java index 7bb5102965a4..d08492821b1d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPackageLocationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public SsisPackageLocationType() { * @param name a name to look for. * @return the corresponding SsisPackageLocationType. 
*/ - @JsonCreator public static SsisPackageLocationType fromString(String name) { return fromString(name, SsisPackageLocationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisParameter.java index c3e30de2d903..0658b2c57363 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisParameter.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisParameter.java @@ -5,83 +5,75 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Ssis parameter. */ @Fluent -public final class SsisParameter { +public final class SsisParameter implements JsonSerializable { /* * Parameter id. */ - @JsonProperty(value = "id") private Long id; /* * Parameter name. */ - @JsonProperty(value = "name") private String name; /* * Parameter description. */ - @JsonProperty(value = "description") private String description; /* * Parameter type. */ - @JsonProperty(value = "dataType") private String dataType; /* * Whether parameter is required. */ - @JsonProperty(value = "required") private Boolean required; /* * Whether parameter is sensitive. */ - @JsonProperty(value = "sensitive") private Boolean sensitive; /* * Design default value of parameter. */ - @JsonProperty(value = "designDefaultValue") private String designDefaultValue; /* * Default value of parameter. */ - @JsonProperty(value = "defaultValue") private String defaultValue; /* * Default sensitive value of parameter. 
*/ - @JsonProperty(value = "sensitiveDefaultValue") private String sensitiveDefaultValue; /* * Parameter value type. */ - @JsonProperty(value = "valueType") private String valueType; /* * Parameter value set. */ - @JsonProperty(value = "valueSet") private Boolean valueSet; /* * Parameter reference variable. */ - @JsonProperty(value = "variable") private String variable; /** @@ -337,4 +329,73 @@ public SsisParameter withVariable(String variable) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", this.id); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeStringField("dataType", this.dataType); + jsonWriter.writeBooleanField("required", this.required); + jsonWriter.writeBooleanField("sensitive", this.sensitive); + jsonWriter.writeStringField("designDefaultValue", this.designDefaultValue); + jsonWriter.writeStringField("defaultValue", this.defaultValue); + jsonWriter.writeStringField("sensitiveDefaultValue", this.sensitiveDefaultValue); + jsonWriter.writeStringField("valueType", this.valueType); + jsonWriter.writeBooleanField("valueSet", this.valueSet); + jsonWriter.writeStringField("variable", this.variable); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisParameter from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisParameter if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisParameter. 
+ */ + public static SsisParameter fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisParameter deserializedSsisParameter = new SsisParameter(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisParameter.id = reader.getNullable(JsonReader::getLong); + } else if ("name".equals(fieldName)) { + deserializedSsisParameter.name = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedSsisParameter.description = reader.getString(); + } else if ("dataType".equals(fieldName)) { + deserializedSsisParameter.dataType = reader.getString(); + } else if ("required".equals(fieldName)) { + deserializedSsisParameter.required = reader.getNullable(JsonReader::getBoolean); + } else if ("sensitive".equals(fieldName)) { + deserializedSsisParameter.sensitive = reader.getNullable(JsonReader::getBoolean); + } else if ("designDefaultValue".equals(fieldName)) { + deserializedSsisParameter.designDefaultValue = reader.getString(); + } else if ("defaultValue".equals(fieldName)) { + deserializedSsisParameter.defaultValue = reader.getString(); + } else if ("sensitiveDefaultValue".equals(fieldName)) { + deserializedSsisParameter.sensitiveDefaultValue = reader.getString(); + } else if ("valueType".equals(fieldName)) { + deserializedSsisParameter.valueType = reader.getString(); + } else if ("valueSet".equals(fieldName)) { + deserializedSsisParameter.valueSet = reader.getNullable(JsonReader::getBoolean); + } else if ("variable".equals(fieldName)) { + deserializedSsisParameter.variable = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisParameter; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java index c523f6011a80..9a9fe16e0f2d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisProject.java @@ -5,48 +5,40 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Ssis project. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SsisProject.class, visible = true) -@JsonTypeName("Project") @Fluent public final class SsisProject extends SsisObjectMetadata { /* * Type of metadata. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private SsisObjectMetadataType type = SsisObjectMetadataType.PROJECT; /* * Folder id which contains project. */ - @JsonProperty(value = "folderId") private Long folderId; /* * Project version. 
*/ - @JsonProperty(value = "version") private Long version; /* * Environment reference in project */ - @JsonProperty(value = "environmentRefs") private List environmentRefs; /* * Parameters in project */ - @JsonProperty(value = "parameters") private List parameters; /** @@ -187,4 +179,65 @@ public void validate() { parameters().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", id()); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeNumberField("folderId", this.folderId); + jsonWriter.writeNumberField("version", this.version); + jsonWriter.writeArrayField("environmentRefs", this.environmentRefs, + (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("parameters", this.parameters, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisProject from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisProject if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisProject. 
+ */ + public static SsisProject fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisProject deserializedSsisProject = new SsisProject(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisProject.withId(reader.getNullable(JsonReader::getLong)); + } else if ("name".equals(fieldName)) { + deserializedSsisProject.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSsisProject.withDescription(reader.getString()); + } else if ("type".equals(fieldName)) { + deserializedSsisProject.type = SsisObjectMetadataType.fromString(reader.getString()); + } else if ("folderId".equals(fieldName)) { + deserializedSsisProject.folderId = reader.getNullable(JsonReader::getLong); + } else if ("version".equals(fieldName)) { + deserializedSsisProject.version = reader.getNullable(JsonReader::getLong); + } else if ("environmentRefs".equals(fieldName)) { + List<SsisEnvironmentReference> environmentRefs + = reader.readArray(reader1 -> SsisEnvironmentReference.fromJson(reader1)); + deserializedSsisProject.environmentRefs = environmentRefs; + } else if ("parameters".equals(fieldName)) { + List<SsisParameter> parameters = reader.readArray(reader1 -> SsisParameter.fromJson(reader1)); + deserializedSsisProject.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedSsisProject; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java index e7bba79d4cf9..f4f215848087 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisPropertyOverride.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * SSIS property override. */ @Fluent -public final class SsisPropertyOverride { +public final class SsisPropertyOverride implements JsonSerializable { /* * SSIS package property override value. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value", required = true) private Object value; /* * Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true */ - @JsonProperty(value = "isSensitive") private Boolean isSensitive; /** @@ -88,4 +90,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SsisPropertyOverride.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("value", this.value); + jsonWriter.writeBooleanField("isSensitive", this.isSensitive); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisPropertyOverride from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisPropertyOverride if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SsisPropertyOverride. 
+ */ + public static SsisPropertyOverride fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisPropertyOverride deserializedSsisPropertyOverride = new SsisPropertyOverride(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedSsisPropertyOverride.value = reader.readUntyped(); + } else if ("isSensitive".equals(fieldName)) { + deserializedSsisPropertyOverride.isSensitive = reader.getNullable(JsonReader::getBoolean); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisPropertyOverride; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisVariable.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisVariable.java index 5be204c66e78..44906e03301f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisVariable.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SsisVariable.java @@ -5,53 +5,50 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Ssis variable. */ @Fluent -public final class SsisVariable { +public final class SsisVariable implements JsonSerializable { /* * Variable id. */ - @JsonProperty(value = "id") private Long id; /* * Variable name. */ - @JsonProperty(value = "name") private String name; /* * Variable description. */ - @JsonProperty(value = "description") private String description; /* * Variable type. 
*/ - @JsonProperty(value = "dataType") private String dataType; /* * Whether variable is sensitive. */ - @JsonProperty(value = "sensitive") private Boolean sensitive; /* * Variable value. */ - @JsonProperty(value = "value") private String value; /* * Variable sensitive value. */ - @JsonProperty(value = "sensitiveValue") private String sensitiveValue; /** @@ -207,4 +204,58 @@ public SsisVariable withSensitiveValue(String sensitiveValue) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("id", this.id); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeStringField("dataType", this.dataType); + jsonWriter.writeBooleanField("sensitive", this.sensitive); + jsonWriter.writeStringField("value", this.value); + jsonWriter.writeStringField("sensitiveValue", this.sensitiveValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SsisVariable from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SsisVariable if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SsisVariable. 
+ */ + public static SsisVariable fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SsisVariable deserializedSsisVariable = new SsisVariable(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedSsisVariable.id = reader.getNullable(JsonReader::getLong); + } else if ("name".equals(fieldName)) { + deserializedSsisVariable.name = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedSsisVariable.description = reader.getString(); + } else if ("dataType".equals(fieldName)) { + deserializedSsisVariable.dataType = reader.getString(); + } else if ("sensitive".equals(fieldName)) { + deserializedSsisVariable.sensitive = reader.getNullable(JsonReader::getBoolean); + } else if ("value".equals(fieldName)) { + deserializedSsisVariable.value = reader.getString(); + } else if ("sensitiveValue".equals(fieldName)) { + deserializedSsisVariable.sensitiveValue = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSsisVariable; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java index 6c274a326d6c..0345dfda718d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StagingSettings.java @@ -6,10 +6,11 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import 
com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; @@ -17,30 +18,26 @@ * Staging settings. */ @Fluent -public final class StagingSettings { +public final class StagingSettings implements JsonSerializable { /* * Staging linked service reference. */ - @JsonProperty(value = "linkedServiceName", required = true) private LinkedServiceReference linkedServiceName; /* * The path to storage for storing the interim data. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "path") private Object path; /* * Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: * boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "enableCompression") private Object enableCompression; /* * Staging settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -118,7 +115,6 @@ public StagingSettings withEnableCompression(Object enableCompression) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -134,14 +130,6 @@ public StagingSettings withAdditionalProperties(Map additionalPr return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -158,4 +146,58 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(StagingSettings.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", this.linkedServiceName); + jsonWriter.writeUntypedField("path", this.path); + jsonWriter.writeUntypedField("enableCompression", this.enableCompression); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of StagingSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of StagingSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the StagingSettings. 
+ */ + public static StagingSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + StagingSettings deserializedStagingSettings = new StagingSettings(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedStagingSettings.linkedServiceName = LinkedServiceReference.fromJson(reader); + } else if ("path".equals(fieldName)) { + deserializedStagingSettings.path = reader.readUntyped(); + } else if ("enableCompression".equals(fieldName)) { + deserializedStagingSettings.enableCompression = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedStagingSettings.additionalProperties = additionalProperties; + + return deserializedStagingSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java index 9a068077cce1..05e3a598a2d1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreReadSettings.java @@ -5,64 +5,39 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; /** * Connector read setting. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StoreReadSettings.class, visible = true) -@JsonTypeName("StoreReadSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AzureBlobStorageReadSettings", value = AzureBlobStorageReadSettings.class), - @JsonSubTypes.Type(name = "AzureBlobFSReadSettings", value = AzureBlobFSReadSettings.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreReadSettings", value = AzureDataLakeStoreReadSettings.class), - @JsonSubTypes.Type(name = "AmazonS3ReadSettings", value = AmazonS3ReadSettings.class), - @JsonSubTypes.Type(name = "FileServerReadSettings", value = FileServerReadSettings.class), - @JsonSubTypes.Type(name = "AzureFileStorageReadSettings", value = AzureFileStorageReadSettings.class), - @JsonSubTypes.Type(name = "AmazonS3CompatibleReadSettings", value = AmazonS3CompatibleReadSettings.class), - @JsonSubTypes.Type(name = "OracleCloudStorageReadSettings", value = OracleCloudStorageReadSettings.class), - @JsonSubTypes.Type(name = "GoogleCloudStorageReadSettings", value = GoogleCloudStorageReadSettings.class), - @JsonSubTypes.Type(name = "FtpReadSettings", value = FtpReadSettings.class), - @JsonSubTypes.Type(name = "SftpReadSettings", value = SftpReadSettings.class), - @JsonSubTypes.Type(name = "HttpReadSettings", value = HttpReadSettings.class), - @JsonSubTypes.Type(name = "HdfsReadSettings", value = HdfsReadSettings.class), - @JsonSubTypes.Type(name = "LakeHouseReadSettings", value = LakeHouseReadSettings.class) }) @Fluent -public 
class StoreReadSettings { +public class StoreReadSettings implements JsonSerializable { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "StoreReadSettings"; /* * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType * integer). */ - @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; /* * Connector read setting. */ - @JsonIgnore private Map additionalProperties; /** @@ -129,7 +104,6 @@ public StoreReadSettings withDisableMetricsCollection(Object disableMetricsColle * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -145,14 +119,6 @@ public StoreReadSettings withAdditionalProperties(Map additional return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -160,4 +126,108 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("maxConcurrentConnections", this.maxConcurrentConnections); + jsonWriter.writeUntypedField("disableMetricsCollection", this.disableMetricsCollection); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of StoreReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of StoreReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the StoreReadSettings. + */ + public static StoreReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("AzureBlobStorageReadSettings".equals(discriminatorValue)) { + return AzureBlobStorageReadSettings.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSReadSettings".equals(discriminatorValue)) { + return AzureBlobFSReadSettings.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreReadSettings".equals(discriminatorValue)) { + return AzureDataLakeStoreReadSettings.fromJson(readerToUse.reset()); + } else if ("AmazonS3ReadSettings".equals(discriminatorValue)) { + return AmazonS3ReadSettings.fromJson(readerToUse.reset()); + } else if ("FileServerReadSettings".equals(discriminatorValue)) { + return FileServerReadSettings.fromJson(readerToUse.reset()); + } else if ("AzureFileStorageReadSettings".equals(discriminatorValue)) { + return AzureFileStorageReadSettings.fromJson(readerToUse.reset()); + } else if ("AmazonS3CompatibleReadSettings".equals(discriminatorValue)) { + return AmazonS3CompatibleReadSettings.fromJson(readerToUse.reset()); + } else if ("OracleCloudStorageReadSettings".equals(discriminatorValue)) { + return OracleCloudStorageReadSettings.fromJson(readerToUse.reset()); + } else if ("GoogleCloudStorageReadSettings".equals(discriminatorValue)) { + return GoogleCloudStorageReadSettings.fromJson(readerToUse.reset()); + } else if ("FtpReadSettings".equals(discriminatorValue)) { + return FtpReadSettings.fromJson(readerToUse.reset()); + } else if ("SftpReadSettings".equals(discriminatorValue)) { + return SftpReadSettings.fromJson(readerToUse.reset()); + } else if ("HttpReadSettings".equals(discriminatorValue)) { + return HttpReadSettings.fromJson(readerToUse.reset()); + } else if ("HdfsReadSettings".equals(discriminatorValue)) { + return HdfsReadSettings.fromJson(readerToUse.reset()); + } else if ("LakeHouseReadSettings".equals(discriminatorValue)) { + return LakeHouseReadSettings.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static StoreReadSettings 
fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + StoreReadSettings deserializedStoreReadSettings = new StoreReadSettings(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedStoreReadSettings.type = reader.getString(); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedStoreReadSettings.maxConcurrentConnections = reader.readUntyped(); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedStoreReadSettings.disableMetricsCollection = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedStoreReadSettings.additionalProperties = additionalProperties; + + return deserializedStoreReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java index 8fe41165f3c4..d1a3673f6f63 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/StoreWriteSettings.java @@ -5,14 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -20,56 +17,39 @@ /** * Connector write settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StoreWriteSettings.class, visible = true) -@JsonTypeName("StoreWriteSettings") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "SftpWriteSettings", value = SftpWriteSettings.class), - @JsonSubTypes.Type(name = "AzureBlobStorageWriteSettings", value = AzureBlobStorageWriteSettings.class), - @JsonSubTypes.Type(name = "AzureBlobFSWriteSettings", value = AzureBlobFSWriteSettings.class), - @JsonSubTypes.Type(name = "AzureDataLakeStoreWriteSettings", value = AzureDataLakeStoreWriteSettings.class), - @JsonSubTypes.Type(name = "FileServerWriteSettings", value = FileServerWriteSettings.class), - @JsonSubTypes.Type(name = "AzureFileStorageWriteSettings", value = AzureFileStorageWriteSettings.class), - @JsonSubTypes.Type(name = "LakeHouseWriteSettings", value = LakeHouseWriteSettings.class) }) @Fluent -public class StoreWriteSettings { +public class StoreWriteSettings implements JsonSerializable { /* * The write setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "StoreWriteSettings"; /* * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType * integer). */ - @JsonProperty(value = "maxConcurrentConnections") private Object maxConcurrentConnections; /* * If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType * boolean). 
*/ - @JsonProperty(value = "disableMetricsCollection") private Object disableMetricsCollection; /* * The type of copy behavior for copy sink. */ - @JsonProperty(value = "copyBehavior") private Object copyBehavior; /* * Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array * of objects). */ - @JsonProperty(value = "metadata") private List metadata; /* * Connector write settings. */ - @JsonIgnore private Map additionalProperties; /** @@ -178,7 +158,6 @@ public StoreWriteSettings withMetadata(List metadata) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -194,14 +173,6 @@ public StoreWriteSettings withAdditionalProperties(Map additiona return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. 
* @@ -212,4 +183,101 @@ public void validate() { metadata().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("maxConcurrentConnections", this.maxConcurrentConnections); + jsonWriter.writeUntypedField("disableMetricsCollection", this.disableMetricsCollection); + jsonWriter.writeUntypedField("copyBehavior", this.copyBehavior); + jsonWriter.writeArrayField("metadata", this.metadata, (writer, element) -> writer.writeJson(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of StoreWriteSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of StoreWriteSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the StoreWriteSettings. + */ + public static StoreWriteSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("SftpWriteSettings".equals(discriminatorValue)) { + return SftpWriteSettings.fromJson(readerToUse.reset()); + } else if ("AzureBlobStorageWriteSettings".equals(discriminatorValue)) { + return AzureBlobStorageWriteSettings.fromJson(readerToUse.reset()); + } else if ("AzureBlobFSWriteSettings".equals(discriminatorValue)) { + return AzureBlobFSWriteSettings.fromJson(readerToUse.reset()); + } else if ("AzureDataLakeStoreWriteSettings".equals(discriminatorValue)) { + return AzureDataLakeStoreWriteSettings.fromJson(readerToUse.reset()); + } else if ("FileServerWriteSettings".equals(discriminatorValue)) { + return FileServerWriteSettings.fromJson(readerToUse.reset()); + } else if ("AzureFileStorageWriteSettings".equals(discriminatorValue)) { + return AzureFileStorageWriteSettings.fromJson(readerToUse.reset()); + } else if ("LakeHouseWriteSettings".equals(discriminatorValue)) { + return LakeHouseWriteSettings.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static StoreWriteSettings fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + StoreWriteSettings deserializedStoreWriteSettings = new StoreWriteSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedStoreWriteSettings.type = reader.getString(); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedStoreWriteSettings.maxConcurrentConnections = reader.readUntyped(); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedStoreWriteSettings.disableMetricsCollection = reader.readUntyped(); + } else if ("copyBehavior".equals(fieldName)) { + deserializedStoreWriteSettings.copyBehavior = reader.readUntyped(); + } else if ("metadata".equals(fieldName)) { + List metadata = 
reader.readArray(reader1 -> MetadataItem.fromJson(reader1)); + deserializedStoreWriteSettings.metadata = metadata; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedStoreWriteSettings.additionalProperties = additionalProperties; + + return deserializedStoreWriteSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SubResourceDebugResource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SubResourceDebugResource.java index 666871cbe0c3..cd3376e3a1d0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SubResourceDebugResource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SubResourceDebugResource.java @@ -5,17 +5,20 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Azure Data Factory nested debug resource. */ @Fluent -public class SubResourceDebugResource { +public class SubResourceDebugResource implements JsonSerializable { /* * The resource name. 
*/ - @JsonProperty(value = "name") private String name; /** @@ -51,4 +54,40 @@ public SubResourceDebugResource withName(String name) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SubResourceDebugResource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SubResourceDebugResource if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the SubResourceDebugResource. + */ + public static SubResourceDebugResource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SubResourceDebugResource deserializedSubResourceDebugResource = new SubResourceDebugResource(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSubResourceDebugResource.name = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedSubResourceDebugResource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java index f44b3cfe76c3..b6278b014847 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchActivity.java @@ -6,32 +6,29 @@ import com.azure.core.annotation.Fluent; import 
com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SwitchActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity evaluates an expression and executes activities under the cases property that correspond to the * expression evaluation expected in the equals property. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SwitchActivity.class, visible = true) -@JsonTypeName("Switch") @Fluent public final class SwitchActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Switch"; /* * Switch activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private SwitchActivityTypeProperties innerTypeProperties = new SwitchActivityTypeProperties(); /** @@ -206,4 +203,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SwitchActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SwitchActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SwitchActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SwitchActivity. 
+ */ + public static SwitchActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SwitchActivity deserializedSwitchActivity = new SwitchActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSwitchActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSwitchActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedSwitchActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedSwitchActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedSwitchActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedSwitchActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedSwitchActivity.innerTypeProperties = SwitchActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSwitchActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSwitchActivity.withAdditionalProperties(additionalProperties); + + return deserializedSwitchActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchCase.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchCase.java index a0c67a70d1df..cec3372e3a5a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchCase.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SwitchCase.java @@ -5,24 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.List; /** * Switch cases with have a value and corresponding activities. */ @Fluent -public final class SwitchCase { +public final class SwitchCase implements JsonSerializable { /* * Expected value that satisfies the expression result of the 'on' property. */ - @JsonProperty(value = "value") private String value; /* * List of activities to execute for satisfied case condition. */ - @JsonProperty(value = "activities") private List activities; /** @@ -81,4 +83,44 @@ public void validate() { activities().forEach(e -> e.validate()); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("value", this.value); + jsonWriter.writeArrayField("activities", this.activities, (writer, element) -> writer.writeJson(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SwitchCase from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SwitchCase if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. 
+ * @throws IOException If an error occurs while reading the SwitchCase. + */ + public static SwitchCase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SwitchCase deserializedSwitchCase = new SwitchCase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + deserializedSwitchCase.value = reader.getString(); + } else if ("activities".equals(fieldName)) { + List activities = reader.readArray(reader1 -> Activity.fromJson(reader1)); + deserializedSwitchCase.activities = activities; + } else { + reader.skipChildren(); + } + } + + return deserializedSwitchCase; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseAuthenticationType.java index 2ad7455f2125..731947fc1844 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public SybaseAuthenticationType() { * @param name a name to look for. * @return the corresponding SybaseAuthenticationType. 
*/ - @JsonCreator public static SybaseAuthenticationType fromString(String name) { return fromString(name, SybaseAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java index 6709031c34ca..c137994809e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SybaseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Sybase data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseLinkedService.class, visible = true) -@JsonTypeName("Sybase") @Fluent public final class SybaseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Sybase"; /* * Sybase linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SybaseLinkedServiceTypeProperties innerTypeProperties = new SybaseLinkedServiceTypeProperties(); /** @@ -276,4 +272,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SybaseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SybaseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SybaseLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SybaseLinkedService. 
+ */ + public static SybaseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SybaseLinkedService deserializedSybaseLinkedService = new SybaseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedSybaseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSybaseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSybaseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSybaseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedSybaseLinkedService.innerTypeProperties + = SybaseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSybaseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSybaseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedSybaseLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java index 070ce7cb7f59..1ce82d79e123 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for Sybase databases. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseSource.class, visible = true) -@JsonTypeName("SybaseSource") @Fluent public final class SybaseSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SybaseSource"; /* * Database query. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -129,4 +126,72 @@ public SybaseSource withDisableMetricsCollection(Object disableMetricsCollection public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SybaseSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SybaseSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the SybaseSource. 
+ */ + public static SybaseSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SybaseSource deserializedSybaseSource = new SybaseSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedSybaseSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedSybaseSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedSybaseSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedSybaseSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedSybaseSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedSybaseSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedSybaseSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedSybaseSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSybaseSource.withAdditionalProperties(additionalProperties); + + return deserializedSybaseSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java index c5649d5c2620..c35e46d8e60f 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SybaseTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SybaseTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Sybase table dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = SybaseTableDataset.class, visible = true) -@JsonTypeName("SybaseTable") @Fluent public final class SybaseTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SybaseTable"; /* * Sybase table dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private SybaseTableDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SybaseTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SybaseTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SybaseTableDataset. 
+ */ + public static SybaseTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SybaseTableDataset deserializedSybaseTableDataset = new SybaseTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedSybaseTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedSybaseTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedSybaseTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedSybaseTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedSybaseTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedSybaseTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedSybaseTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedSybaseTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedSybaseTableDataset.innerTypeProperties + = SybaseTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSybaseTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedSybaseTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java index 384ef6bd149d..1bc1fea2926f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SynapseNotebookActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Execute Synapse notebook activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SynapseNotebookActivity.class, - visible = true) -@JsonTypeName("SynapseNotebook") @Fluent public final class SynapseNotebookActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SynapseNotebook"; /* * Execute Synapse notebook activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private SynapseNotebookActivityTypeProperties innerTypeProperties = new SynapseNotebookActivityTypeProperties(); /** @@ -396,4 +388,85 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseNotebookActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SynapseNotebookActivity if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseNotebookActivity. 
+ */ + public static SynapseNotebookActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseNotebookActivity deserializedSynapseNotebookActivity = new SynapseNotebookActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSynapseNotebookActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSynapseNotebookActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedSynapseNotebookActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedSynapseNotebookActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedSynapseNotebookActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedSynapseNotebookActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedSynapseNotebookActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedSynapseNotebookActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSynapseNotebookActivity.innerTypeProperties + = SynapseNotebookActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedSynapseNotebookActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSynapseNotebookActivity.withAdditionalProperties(additionalProperties); + + return deserializedSynapseNotebookActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java index aefa62cb71b3..032b4c6d8fe4 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Synapse notebook reference type. */ @Fluent -public final class SynapseNotebookReference { +public final class SynapseNotebookReference implements JsonSerializable { /* * Synapse notebook reference type. */ - @JsonProperty(value = "type", required = true) private NotebookReferenceType type; /* * Reference notebook name. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "referenceName", required = true) private Object referenceName; /** @@ -89,4 +91,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? 
null : this.type.toString()); + jsonWriter.writeUntypedField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseNotebookReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SynapseNotebookReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseNotebookReference. + */ + public static SynapseNotebookReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseNotebookReference deserializedSynapseNotebookReference = new SynapseNotebookReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSynapseNotebookReference.type = NotebookReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedSynapseNotebookReference.referenceName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSynapseNotebookReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java index 930f696f979e..942d1a9207ec 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java @@ -6,36 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.SynapseSparkJobActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Execute spark job activity. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = SynapseSparkJobDefinitionActivity.class, - visible = true) -@JsonTypeName("SparkJob") @Fluent public final class SynapseSparkJobDefinitionActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "SparkJob"; /* * Execute spark job activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private SynapseSparkJobActivityTypeProperties innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); /** @@ -553,4 +545,88 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobDefinitionActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseSparkJobDefinitionActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SynapseSparkJobDefinitionActivity if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseSparkJobDefinitionActivity. 
+ */ + public static SynapseSparkJobDefinitionActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseSparkJobDefinitionActivity deserializedSynapseSparkJobDefinitionActivity + = new SynapseSparkJobDefinitionActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity + .withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedSynapseSparkJobDefinitionActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedSynapseSparkJobDefinitionActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity + .withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedSynapseSparkJobDefinitionActivity.innerTypeProperties + = SynapseSparkJobActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + 
deserializedSynapseSparkJobDefinitionActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedSynapseSparkJobDefinitionActivity.withAdditionalProperties(additionalProperties); + + return deserializedSynapseSparkJobDefinitionActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java index a95ec756afa5..f2ae680cc53c 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Synapse spark job reference type. */ @Fluent -public final class SynapseSparkJobReference { +public final class SynapseSparkJobReference implements JsonSerializable { /* * Synapse spark job reference type. */ - @JsonProperty(value = "type", required = true) private SparkJobReferenceType type; /* * Reference spark job name. Expression with resultType string. 
*/ - @JsonProperty(value = "referenceName", required = true) private Object referenceName; /** @@ -89,4 +91,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeUntypedField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of SynapseSparkJobReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of SynapseSparkJobReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the SynapseSparkJobReference. 
+ */ + public static SynapseSparkJobReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + SynapseSparkJobReference deserializedSynapseSparkJobReference = new SynapseSparkJobReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedSynapseSparkJobReference.type = SparkJobReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedSynapseSparkJobReference.referenceName = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedSynapseSparkJobReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java index bb1afaec2117..ca43a64ff94a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularSource.java @@ -5,103 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Copy activity sources of tabular type. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TabularSource.class, visible = true) -@JsonTypeName("TabularSource") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "AzureTableSource", value = AzureTableSource.class), - @JsonSubTypes.Type(name = "InformixSource", value = InformixSource.class), - @JsonSubTypes.Type(name = "Db2Source", value = Db2Source.class), - @JsonSubTypes.Type(name = "OdbcSource", value = OdbcSource.class), - @JsonSubTypes.Type(name = "MySqlSource", value = MySqlSource.class), - @JsonSubTypes.Type(name = "PostgreSqlSource", value = PostgreSqlSource.class), - @JsonSubTypes.Type(name = "PostgreSqlV2Source", value = PostgreSqlV2Source.class), - @JsonSubTypes.Type(name = "SybaseSource", value = SybaseSource.class), - @JsonSubTypes.Type(name = "SapBwSource", value = SapBwSource.class), - @JsonSubTypes.Type(name = "SalesforceSource", value = SalesforceSource.class), - @JsonSubTypes.Type(name = "SapCloudForCustomerSource", value = SapCloudForCustomerSource.class), - @JsonSubTypes.Type(name = "SapEccSource", value = SapEccSource.class), - @JsonSubTypes.Type(name = "SapHanaSource", value = SapHanaSource.class), - @JsonSubTypes.Type(name = "SapOpenHubSource", value = SapOpenHubSource.class), - @JsonSubTypes.Type(name = "SapOdpSource", value = SapOdpSource.class), - @JsonSubTypes.Type(name = "SapTableSource", value = SapTableSource.class), - @JsonSubTypes.Type(name = "SqlSource", value = SqlSource.class), - @JsonSubTypes.Type(name = "SqlServerSource", value = SqlServerSource.class), - @JsonSubTypes.Type(name = "AmazonRdsForSqlServerSource", value = AmazonRdsForSqlServerSource.class), - @JsonSubTypes.Type(name = "AzureSqlSource", value = AzureSqlSource.class), - @JsonSubTypes.Type(name = "SqlMISource", value = SqlMISource.class), - @JsonSubTypes.Type(name = "SqlDWSource", value = SqlDWSource.class), - @JsonSubTypes.Type(name = "AzureMySqlSource", value = AzureMySqlSource.class), - @JsonSubTypes.Type(name = "TeradataSource", value 
= TeradataSource.class), - @JsonSubTypes.Type(name = "CassandraSource", value = CassandraSource.class), - @JsonSubTypes.Type(name = "AmazonMWSSource", value = AmazonMwsSource.class), - @JsonSubTypes.Type(name = "AzurePostgreSqlSource", value = AzurePostgreSqlSource.class), - @JsonSubTypes.Type(name = "ConcurSource", value = ConcurSource.class), - @JsonSubTypes.Type(name = "CouchbaseSource", value = CouchbaseSource.class), - @JsonSubTypes.Type(name = "DrillSource", value = DrillSource.class), - @JsonSubTypes.Type(name = "EloquaSource", value = EloquaSource.class), - @JsonSubTypes.Type(name = "GoogleBigQuerySource", value = GoogleBigQuerySource.class), - @JsonSubTypes.Type(name = "GoogleBigQueryV2Source", value = GoogleBigQueryV2Source.class), - @JsonSubTypes.Type(name = "GreenplumSource", value = GreenplumSource.class), - @JsonSubTypes.Type(name = "HBaseSource", value = HBaseSource.class), - @JsonSubTypes.Type(name = "HiveSource", value = HiveSource.class), - @JsonSubTypes.Type(name = "HubspotSource", value = HubspotSource.class), - @JsonSubTypes.Type(name = "ImpalaSource", value = ImpalaSource.class), - @JsonSubTypes.Type(name = "JiraSource", value = JiraSource.class), - @JsonSubTypes.Type(name = "MagentoSource", value = MagentoSource.class), - @JsonSubTypes.Type(name = "MariaDBSource", value = MariaDBSource.class), - @JsonSubTypes.Type(name = "AzureMariaDBSource", value = AzureMariaDBSource.class), - @JsonSubTypes.Type(name = "MarketoSource", value = MarketoSource.class), - @JsonSubTypes.Type(name = "PaypalSource", value = PaypalSource.class), - @JsonSubTypes.Type(name = "PhoenixSource", value = PhoenixSource.class), - @JsonSubTypes.Type(name = "PrestoSource", value = PrestoSource.class), - @JsonSubTypes.Type(name = "QuickBooksSource", value = QuickBooksSource.class), - @JsonSubTypes.Type(name = "ServiceNowSource", value = ServiceNowSource.class), - @JsonSubTypes.Type(name = "ShopifySource", value = ShopifySource.class), - @JsonSubTypes.Type(name = "SparkSource", 
value = SparkSource.class), - @JsonSubTypes.Type(name = "SquareSource", value = SquareSource.class), - @JsonSubTypes.Type(name = "XeroSource", value = XeroSource.class), - @JsonSubTypes.Type(name = "ZohoSource", value = ZohoSource.class), - @JsonSubTypes.Type(name = "NetezzaSource", value = NetezzaSource.class), - @JsonSubTypes.Type(name = "VerticaSource", value = VerticaSource.class), - @JsonSubTypes.Type(name = "SalesforceMarketingCloudSource", value = SalesforceMarketingCloudSource.class), - @JsonSubTypes.Type(name = "ResponsysSource", value = ResponsysSource.class), - @JsonSubTypes.Type(name = "DynamicsAXSource", value = DynamicsAXSource.class), - @JsonSubTypes.Type(name = "OracleServiceCloudSource", value = OracleServiceCloudSource.class), - @JsonSubTypes.Type(name = "GoogleAdWordsSource", value = GoogleAdWordsSource.class), - @JsonSubTypes.Type(name = "AmazonRedshiftSource", value = AmazonRedshiftSource.class), - @JsonSubTypes.Type(name = "WarehouseSource", value = WarehouseSource.class), - @JsonSubTypes.Type(name = "SalesforceV2Source", value = SalesforceV2Source.class), - @JsonSubTypes.Type(name = "ServiceNowV2Source", value = ServiceNowV2Source.class) }) @Fluent public class TabularSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TabularSource"; /* * Query timeout. Type: string (or Expression with resultType string), pattern: * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ - @JsonProperty(value = "queryTimeout") private Object queryTimeout; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -209,4 +139,220 @@ public TabularSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("queryTimeout", this.queryTimeout); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TabularSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TabularSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TabularSource. 
+ */ + public static TabularSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("AzureTableSource".equals(discriminatorValue)) { + return AzureTableSource.fromJson(readerToUse.reset()); + } else if ("InformixSource".equals(discriminatorValue)) { + return InformixSource.fromJson(readerToUse.reset()); + } else if ("Db2Source".equals(discriminatorValue)) { + return Db2Source.fromJson(readerToUse.reset()); + } else if ("OdbcSource".equals(discriminatorValue)) { + return OdbcSource.fromJson(readerToUse.reset()); + } else if ("MySqlSource".equals(discriminatorValue)) { + return MySqlSource.fromJson(readerToUse.reset()); + } else if ("PostgreSqlSource".equals(discriminatorValue)) { + return PostgreSqlSource.fromJson(readerToUse.reset()); + } else if ("PostgreSqlV2Source".equals(discriminatorValue)) { + return PostgreSqlV2Source.fromJson(readerToUse.reset()); + } else if ("SybaseSource".equals(discriminatorValue)) { + return SybaseSource.fromJson(readerToUse.reset()); + } else if ("SapBwSource".equals(discriminatorValue)) { + return SapBwSource.fromJson(readerToUse.reset()); + } else if ("SalesforceSource".equals(discriminatorValue)) { + return SalesforceSource.fromJson(readerToUse.reset()); + } else if ("SapCloudForCustomerSource".equals(discriminatorValue)) { + return SapCloudForCustomerSource.fromJson(readerToUse.reset()); + } else if ("SapEccSource".equals(discriminatorValue)) { + return 
SapEccSource.fromJson(readerToUse.reset()); + } else if ("SapHanaSource".equals(discriminatorValue)) { + return SapHanaSource.fromJson(readerToUse.reset()); + } else if ("SapOpenHubSource".equals(discriminatorValue)) { + return SapOpenHubSource.fromJson(readerToUse.reset()); + } else if ("SapOdpSource".equals(discriminatorValue)) { + return SapOdpSource.fromJson(readerToUse.reset()); + } else if ("SapTableSource".equals(discriminatorValue)) { + return SapTableSource.fromJson(readerToUse.reset()); + } else if ("SqlSource".equals(discriminatorValue)) { + return SqlSource.fromJson(readerToUse.reset()); + } else if ("SqlServerSource".equals(discriminatorValue)) { + return SqlServerSource.fromJson(readerToUse.reset()); + } else if ("AmazonRdsForSqlServerSource".equals(discriminatorValue)) { + return AmazonRdsForSqlServerSource.fromJson(readerToUse.reset()); + } else if ("AzureSqlSource".equals(discriminatorValue)) { + return AzureSqlSource.fromJson(readerToUse.reset()); + } else if ("SqlMISource".equals(discriminatorValue)) { + return SqlMISource.fromJson(readerToUse.reset()); + } else if ("SqlDWSource".equals(discriminatorValue)) { + return SqlDWSource.fromJson(readerToUse.reset()); + } else if ("AzureMySqlSource".equals(discriminatorValue)) { + return AzureMySqlSource.fromJson(readerToUse.reset()); + } else if ("TeradataSource".equals(discriminatorValue)) { + return TeradataSource.fromJson(readerToUse.reset()); + } else if ("CassandraSource".equals(discriminatorValue)) { + return CassandraSource.fromJson(readerToUse.reset()); + } else if ("AmazonMWSSource".equals(discriminatorValue)) { + return AmazonMwsSource.fromJson(readerToUse.reset()); + } else if ("AzurePostgreSqlSource".equals(discriminatorValue)) { + return AzurePostgreSqlSource.fromJson(readerToUse.reset()); + } else if ("ConcurSource".equals(discriminatorValue)) { + return ConcurSource.fromJson(readerToUse.reset()); + } else if ("CouchbaseSource".equals(discriminatorValue)) { + return 
CouchbaseSource.fromJson(readerToUse.reset()); + } else if ("DrillSource".equals(discriminatorValue)) { + return DrillSource.fromJson(readerToUse.reset()); + } else if ("EloquaSource".equals(discriminatorValue)) { + return EloquaSource.fromJson(readerToUse.reset()); + } else if ("GoogleBigQuerySource".equals(discriminatorValue)) { + return GoogleBigQuerySource.fromJson(readerToUse.reset()); + } else if ("GoogleBigQueryV2Source".equals(discriminatorValue)) { + return GoogleBigQueryV2Source.fromJson(readerToUse.reset()); + } else if ("GreenplumSource".equals(discriminatorValue)) { + return GreenplumSource.fromJson(readerToUse.reset()); + } else if ("HBaseSource".equals(discriminatorValue)) { + return HBaseSource.fromJson(readerToUse.reset()); + } else if ("HiveSource".equals(discriminatorValue)) { + return HiveSource.fromJson(readerToUse.reset()); + } else if ("HubspotSource".equals(discriminatorValue)) { + return HubspotSource.fromJson(readerToUse.reset()); + } else if ("ImpalaSource".equals(discriminatorValue)) { + return ImpalaSource.fromJson(readerToUse.reset()); + } else if ("JiraSource".equals(discriminatorValue)) { + return JiraSource.fromJson(readerToUse.reset()); + } else if ("MagentoSource".equals(discriminatorValue)) { + return MagentoSource.fromJson(readerToUse.reset()); + } else if ("MariaDBSource".equals(discriminatorValue)) { + return MariaDBSource.fromJson(readerToUse.reset()); + } else if ("AzureMariaDBSource".equals(discriminatorValue)) { + return AzureMariaDBSource.fromJson(readerToUse.reset()); + } else if ("MarketoSource".equals(discriminatorValue)) { + return MarketoSource.fromJson(readerToUse.reset()); + } else if ("PaypalSource".equals(discriminatorValue)) { + return PaypalSource.fromJson(readerToUse.reset()); + } else if ("PhoenixSource".equals(discriminatorValue)) { + return PhoenixSource.fromJson(readerToUse.reset()); + } else if ("PrestoSource".equals(discriminatorValue)) { + return PrestoSource.fromJson(readerToUse.reset()); + } else if 
("QuickBooksSource".equals(discriminatorValue)) { + return QuickBooksSource.fromJson(readerToUse.reset()); + } else if ("ServiceNowSource".equals(discriminatorValue)) { + return ServiceNowSource.fromJson(readerToUse.reset()); + } else if ("ShopifySource".equals(discriminatorValue)) { + return ShopifySource.fromJson(readerToUse.reset()); + } else if ("SparkSource".equals(discriminatorValue)) { + return SparkSource.fromJson(readerToUse.reset()); + } else if ("SquareSource".equals(discriminatorValue)) { + return SquareSource.fromJson(readerToUse.reset()); + } else if ("XeroSource".equals(discriminatorValue)) { + return XeroSource.fromJson(readerToUse.reset()); + } else if ("ZohoSource".equals(discriminatorValue)) { + return ZohoSource.fromJson(readerToUse.reset()); + } else if ("NetezzaSource".equals(discriminatorValue)) { + return NetezzaSource.fromJson(readerToUse.reset()); + } else if ("VerticaSource".equals(discriminatorValue)) { + return VerticaSource.fromJson(readerToUse.reset()); + } else if ("SalesforceMarketingCloudSource".equals(discriminatorValue)) { + return SalesforceMarketingCloudSource.fromJson(readerToUse.reset()); + } else if ("ResponsysSource".equals(discriminatorValue)) { + return ResponsysSource.fromJson(readerToUse.reset()); + } else if ("DynamicsAXSource".equals(discriminatorValue)) { + return DynamicsAXSource.fromJson(readerToUse.reset()); + } else if ("OracleServiceCloudSource".equals(discriminatorValue)) { + return OracleServiceCloudSource.fromJson(readerToUse.reset()); + } else if ("GoogleAdWordsSource".equals(discriminatorValue)) { + return GoogleAdWordsSource.fromJson(readerToUse.reset()); + } else if ("AmazonRedshiftSource".equals(discriminatorValue)) { + return AmazonRedshiftSource.fromJson(readerToUse.reset()); + } else if ("WarehouseSource".equals(discriminatorValue)) { + return WarehouseSource.fromJson(readerToUse.reset()); + } else if ("SalesforceV2Source".equals(discriminatorValue)) { + return 
SalesforceV2Source.fromJson(readerToUse.reset()); + } else if ("ServiceNowV2Source".equals(discriminatorValue)) { + return ServiceNowV2Source.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static TabularSource fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TabularSource deserializedTabularSource = new TabularSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedTabularSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedTabularSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedTabularSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedTabularSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedTabularSource.type = reader.getString(); + } else if ("queryTimeout".equals(fieldName)) { + deserializedTabularSource.queryTimeout = reader.readUntyped(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedTabularSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTabularSource.withAdditionalProperties(additionalProperties); + + return deserializedTabularSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java index 4a1fae014e51..92462a26325f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TabularTranslator.java @@ -5,30 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity tabular translator. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TabularTranslator.class, visible = true) -@JsonTypeName("TabularTranslator") @Fluent public final class TabularTranslator extends CopyTranslator { /* * Copy translator type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TabularTranslator"; /* * Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with * resultType string). This property will be retired. Please use mappings property. */ - @JsonProperty(value = "columnMappings") private Object columnMappings; /* @@ -36,21 +33,18 @@ public final class TabularTranslator extends CopyTranslator { * "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType * object). This property will be retired. Please use mappings property. 
*/ - @JsonProperty(value = "schemaMapping") private Object schemaMapping; /* * The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType * object). */ - @JsonProperty(value = "collectionReference") private Object collectionReference; /* * Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression * with resultType boolean). */ - @JsonProperty(value = "mapComplexValuesToString") private Object mapComplexValuesToString; /* @@ -62,20 +56,17 @@ public final class TabularTranslator extends CopyTranslator { * "path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or * Expression with resultType object). */ - @JsonProperty(value = "mappings") private Object mappings; /* * Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "typeConversion") private Object typeConversion; /* * Type conversion settings */ - @JsonProperty(value = "typeConversionSettings") private TypeConversionSettings typeConversionSettings; /** @@ -266,4 +257,72 @@ public void validate() { typeConversionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("columnMappings", this.columnMappings); + jsonWriter.writeUntypedField("schemaMapping", this.schemaMapping); + jsonWriter.writeUntypedField("collectionReference", this.collectionReference); + jsonWriter.writeUntypedField("mapComplexValuesToString", this.mapComplexValuesToString); + jsonWriter.writeUntypedField("mappings", this.mappings); + jsonWriter.writeUntypedField("typeConversion", this.typeConversion); + jsonWriter.writeJsonField("typeConversionSettings", this.typeConversionSettings); + 
if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TabularTranslator from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TabularTranslator if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TabularTranslator. + */ + public static TabularTranslator fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TabularTranslator deserializedTabularTranslator = new TabularTranslator(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedTabularTranslator.type = reader.getString(); + } else if ("columnMappings".equals(fieldName)) { + deserializedTabularTranslator.columnMappings = reader.readUntyped(); + } else if ("schemaMapping".equals(fieldName)) { + deserializedTabularTranslator.schemaMapping = reader.readUntyped(); + } else if ("collectionReference".equals(fieldName)) { + deserializedTabularTranslator.collectionReference = reader.readUntyped(); + } else if ("mapComplexValuesToString".equals(fieldName)) { + deserializedTabularTranslator.mapComplexValuesToString = reader.readUntyped(); + } else if ("mappings".equals(fieldName)) { + deserializedTabularTranslator.mappings = reader.readUntyped(); + } else if ("typeConversion".equals(fieldName)) { + deserializedTabularTranslator.typeConversion = reader.readUntyped(); + } else if ("typeConversionSettings".equals(fieldName)) { + deserializedTabularTranslator.typeConversionSettings = TypeConversionSettings.fromJson(reader); + } else { + if 
(additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTabularTranslator.withAdditionalProperties(additionalProperties); + + return deserializedTabularTranslator; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java index 9750e1392f9b..e56e5a79308a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarGZipReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The TarGZip compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TarGZipReadSettings.class, visible = true) -@JsonTypeName("TarGZipReadSettings") @Fluent public final class TarGZipReadSettings extends CompressionReadSettings { /* * The Compression setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TarGZipReadSettings"; /* * Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "preserveCompressionFileNameAsFolder") private Object preserveCompressionFileNameAsFolder; /** @@ -77,4 +74,54 @@ public TarGZipReadSettings withPreserveCompressionFileNameAsFolder(Object preser public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preserveCompressionFileNameAsFolder", this.preserveCompressionFileNameAsFolder); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TarGZipReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TarGZipReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TarGZipReadSettings. 
+ */ + public static TarGZipReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TarGZipReadSettings deserializedTarGZipReadSettings = new TarGZipReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedTarGZipReadSettings.type = reader.getString(); + } else if ("preserveCompressionFileNameAsFolder".equals(fieldName)) { + deserializedTarGZipReadSettings.preserveCompressionFileNameAsFolder = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTarGZipReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedTarGZipReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java index c78a2241fce2..72e9e1858e07 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TarReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; 
+import java.util.LinkedHashMap; +import java.util.Map; /** * The Tar compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TarReadSettings.class, visible = true) -@JsonTypeName("TarReadSettings") @Fluent public final class TarReadSettings extends CompressionReadSettings { /* * The Compression setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TarReadSettings"; /* * Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "preserveCompressionFileNameAsFolder") private Object preserveCompressionFileNameAsFolder; /** @@ -77,4 +74,54 @@ public TarReadSettings withPreserveCompressionFileNameAsFolder(Object preserveCo public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preserveCompressionFileNameAsFolder", this.preserveCompressionFileNameAsFolder); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TarReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TarReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TarReadSettings. 
+ */ + public static TarReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TarReadSettings deserializedTarReadSettings = new TarReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedTarReadSettings.type = reader.getString(); + } else if ("preserveCompressionFileNameAsFolder".equals(fieldName)) { + deserializedTarReadSettings.preserveCompressionFileNameAsFolder = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTarReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedTarReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskAuthenticationType.java index 72ea996761eb..e2dd5e483a07 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public TeamDeskAuthenticationType() { * @param name a name to look for. * @return the corresponding TeamDeskAuthenticationType. 
*/ - @JsonCreator public static TeamDeskAuthenticationType fromString(String name) { return fromString(name, TeamDeskAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java index 9efde035249d..4c78b325ece2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeamDeskLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TeamDeskLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for TeamDesk. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeamDeskLinkedService.class, visible = true) -@JsonTypeName("TeamDesk") @Fluent public final class TeamDeskLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TeamDesk"; /* * TeamDesk linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private TeamDeskLinkedServiceTypeProperties innerTypeProperties = new TeamDeskLinkedServiceTypeProperties(); /** @@ -255,4 +251,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TeamDeskLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeamDeskLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeamDeskLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TeamDeskLinkedService. 
+ */ + public static TeamDeskLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeamDeskLinkedService deserializedTeamDeskLinkedService = new TeamDeskLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedTeamDeskLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedTeamDeskLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedTeamDeskLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTeamDeskLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedTeamDeskLinkedService.innerTypeProperties + = TeamDeskLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedTeamDeskLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTeamDeskLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedTeamDeskLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataAuthenticationType.java index ee5664fb0963..46c53456f915 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public TeradataAuthenticationType() { * @param name a name to look for. * @return the corresponding TeradataAuthenticationType. */ - @JsonCreator public static TeradataAuthenticationType fromString(String name) { return fromString(name, TeradataAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java index 1d3cc30923cf..9d9caed9a668 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TeradataLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import 
java.util.List; import java.util.Map; /** * Linked service for Teradata data source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataLinkedService.class, visible = true) -@JsonTypeName("Teradata") @Fluent public final class TeradataLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Teradata"; /* * Teradata linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private TeradataLinkedServiceTypeProperties innerTypeProperties = new TeradataLinkedServiceTypeProperties(); /** @@ -255,4 +251,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TeradataLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. 
+ * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TeradataLinkedService. + */ + public static TeradataLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataLinkedService deserializedTeradataLinkedService = new TeradataLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedTeradataLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedTeradataLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedTeradataLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTeradataLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedTeradataLinkedService.innerTypeProperties + = TeradataLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedTeradataLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTeradataLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedTeradataLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java index b9a3f8b1dac7..5e34e171a3a7 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataPartitionSettings.java @@ -5,32 +5,33 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * The settings that will be leveraged for teradata source partitioning. */ @Fluent -public final class TeradataPartitionSettings { +public final class TeradataPartitionSettings implements JsonSerializable { /* * The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression * with resultType string). */ - @JsonProperty(value = "partitionColumnName") private Object partitionColumnName; /* * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "partitionUpperBound") private Object partitionUpperBound; /* * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. * Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "partitionLowerBound") private Object partitionLowerBound; /** @@ -112,4 +113,46 @@ public TeradataPartitionSettings withPartitionLowerBound(Object partitionLowerBo */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("partitionColumnName", this.partitionColumnName); + jsonWriter.writeUntypedField("partitionUpperBound", this.partitionUpperBound); + jsonWriter.writeUntypedField("partitionLowerBound", this.partitionLowerBound); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataPartitionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataPartitionSettings if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the TeradataPartitionSettings. 
+ */ + public static TeradataPartitionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataPartitionSettings deserializedTeradataPartitionSettings = new TeradataPartitionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partitionColumnName".equals(fieldName)) { + deserializedTeradataPartitionSettings.partitionColumnName = reader.readUntyped(); + } else if ("partitionUpperBound".equals(fieldName)) { + deserializedTeradataPartitionSettings.partitionUpperBound = reader.readUntyped(); + } else if ("partitionLowerBound".equals(fieldName)) { + deserializedTeradataPartitionSettings.partitionLowerBound = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedTeradataPartitionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java index f9e3c039c2ed..b01121ea3d41 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataSource.java @@ -5,42 +5,37 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A 
copy activity Teradata source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataSource.class, visible = true) -@JsonTypeName("TeradataSource") @Fluent public final class TeradataSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TeradataSource"; /* * Teradata query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "query") private Object query; /* * The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", * "DynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for teradata source partitioning. */ - @JsonProperty(value = "partitionSettings") private TeradataPartitionSettings partitionSettings; /** @@ -187,4 +182,78 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + 
jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TeradataSource. + */ + public static TeradataSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataSource deserializedTeradataSource = new TeradataSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedTeradataSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedTeradataSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedTeradataSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedTeradataSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedTeradataSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedTeradataSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedTeradataSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedTeradataSource.query = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedTeradataSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + 
deserializedTeradataSource.partitionSettings = TeradataPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTeradataSource.withAdditionalProperties(additionalProperties); + + return deserializedTeradataSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java index 773f68d4712e..cc9f90ebfd53 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TeradataTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TeradataTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The Teradata database dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TeradataTableDataset.class, visible = true) -@JsonTypeName("TeradataTable") @Fluent public final class TeradataTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TeradataTable"; /* * Teradata dataset properties. */ - @JsonProperty(value = "typeProperties") private TeradataTableDatasetTypeProperties innerTypeProperties; /** @@ -179,4 +175,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TeradataTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TeradataTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TeradataTableDataset. 
+ */ + public static TeradataTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TeradataTableDataset deserializedTeradataTableDataset = new TeradataTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedTeradataTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedTeradataTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedTeradataTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedTeradataTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedTeradataTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTeradataTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedTeradataTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedTeradataTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedTeradataTableDataset.innerTypeProperties + = TeradataTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTeradataTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedTeradataTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java index 8b302447c02b..b418b1bdfda8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TextFormat.java @@ -5,53 +5,46 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The data stored in text format. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TextFormat.class, visible = true) -@JsonTypeName("TextFormat") @Fluent public final class TextFormat extends DatasetStorageFormat { /* * Type of dataset storage format. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TextFormat"; /* * The column delimiter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "columnDelimiter") private Object columnDelimiter; /* * The row delimiter. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "rowDelimiter") private Object rowDelimiter; /* * The escape character. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "escapeChar") private Object escapeChar; /* * The quote character. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "quoteChar") private Object quoteChar; /* * The null value string. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "nullValue") private Object nullValue; /* @@ -60,28 +53,24 @@ public final class TextFormat extends DatasetStorageFormat { * values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "encodingName") private Object encodingName; /* * Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with * resultType boolean). */ - @JsonProperty(value = "treatEmptyAsNull") private Object treatEmptyAsNull; /* * The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or * Expression with resultType integer). */ - @JsonProperty(value = "skipLineCount") private Object skipLineCount; /* * When used as input, treat the first row of data as headers. When used as output,write the headers into the output * as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "firstRowAsHeader") private Object firstRowAsHeader; /** @@ -321,4 +310,84 @@ public TextFormat withDeserializer(Object deserializer) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("serializer", serializer()); + jsonWriter.writeUntypedField("deserializer", deserializer()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("columnDelimiter", this.columnDelimiter); + jsonWriter.writeUntypedField("rowDelimiter", this.rowDelimiter); + jsonWriter.writeUntypedField("escapeChar", this.escapeChar); + jsonWriter.writeUntypedField("quoteChar", this.quoteChar); + jsonWriter.writeUntypedField("nullValue", this.nullValue); + jsonWriter.writeUntypedField("encodingName", this.encodingName); + jsonWriter.writeUntypedField("treatEmptyAsNull", this.treatEmptyAsNull); + jsonWriter.writeUntypedField("skipLineCount", this.skipLineCount); + jsonWriter.writeUntypedField("firstRowAsHeader", this.firstRowAsHeader); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TextFormat from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TextFormat if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the TextFormat. 
+ */ + public static TextFormat fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TextFormat deserializedTextFormat = new TextFormat(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("serializer".equals(fieldName)) { + deserializedTextFormat.withSerializer(reader.readUntyped()); + } else if ("deserializer".equals(fieldName)) { + deserializedTextFormat.withDeserializer(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedTextFormat.type = reader.getString(); + } else if ("columnDelimiter".equals(fieldName)) { + deserializedTextFormat.columnDelimiter = reader.readUntyped(); + } else if ("rowDelimiter".equals(fieldName)) { + deserializedTextFormat.rowDelimiter = reader.readUntyped(); + } else if ("escapeChar".equals(fieldName)) { + deserializedTextFormat.escapeChar = reader.readUntyped(); + } else if ("quoteChar".equals(fieldName)) { + deserializedTextFormat.quoteChar = reader.readUntyped(); + } else if ("nullValue".equals(fieldName)) { + deserializedTextFormat.nullValue = reader.readUntyped(); + } else if ("encodingName".equals(fieldName)) { + deserializedTextFormat.encodingName = reader.readUntyped(); + } else if ("treatEmptyAsNull".equals(fieldName)) { + deserializedTextFormat.treatEmptyAsNull = reader.readUntyped(); + } else if ("skipLineCount".equals(fieldName)) { + deserializedTextFormat.skipLineCount = reader.readUntyped(); + } else if ("firstRowAsHeader".equals(fieldName)) { + deserializedTextFormat.firstRowAsHeader = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTextFormat.withAdditionalProperties(additionalProperties); + + return deserializedTextFormat; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java index 8bf803148e01..50eb089b3d3b 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Transformation.java @@ -6,41 +6,40 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A data flow transformation. */ @Fluent -public class Transformation { +public class Transformation implements JsonSerializable { /* * Transformation name. */ - @JsonProperty(value = "name", required = true) private String name; /* * Transformation description. */ - @JsonProperty(value = "description") private String description; /* * Dataset reference. */ - @JsonProperty(value = "dataset") private DatasetReference dataset; /* * Linked service reference. 
*/ - @JsonProperty(value = "linkedService") private LinkedServiceReference linkedService; /* * Flowlet Reference */ - @JsonProperty(value = "flowlet") private DataFlowReference flowlet; /** @@ -171,4 +170,53 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(Transformation.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeJsonField("dataset", this.dataset); + jsonWriter.writeJsonField("linkedService", this.linkedService); + jsonWriter.writeJsonField("flowlet", this.flowlet); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Transformation from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Transformation if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the Transformation. 
+ */ + public static Transformation fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Transformation deserializedTransformation = new Transformation(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedTransformation.name = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedTransformation.description = reader.getString(); + } else if ("dataset".equals(fieldName)) { + deserializedTransformation.dataset = DatasetReference.fromJson(reader); + } else if ("linkedService".equals(fieldName)) { + deserializedTransformation.linkedService = LinkedServiceReference.fromJson(reader); + } else if ("flowlet".equals(fieldName)) { + deserializedTransformation.flowlet = DataFlowReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedTransformation; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java index bf3d3744c490..56dadeeb2ba1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/Trigger.java @@ -5,14 +5,11 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -20,44 +17,31 @@ /** * Azure data factory nested object which contains information about creating pipeline run. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = Trigger.class, visible = true) -@JsonTypeName("Trigger") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "MultiplePipelineTrigger", value = MultiplePipelineTrigger.class), - @JsonSubTypes.Type(name = "TumblingWindowTrigger", value = TumblingWindowTrigger.class), - @JsonSubTypes.Type(name = "RerunTumblingWindowTrigger", value = RerunTumblingWindowTrigger.class), - @JsonSubTypes.Type(name = "ChainingTrigger", value = ChainingTrigger.class) }) @Fluent -public class Trigger { +public class Trigger implements JsonSerializable { /* * Trigger type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Trigger"; /* * Trigger description. */ - @JsonProperty(value = "description") private String description; /* * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. */ - @JsonProperty(value = "runtimeState", access = JsonProperty.Access.WRITE_ONLY) private TriggerRuntimeState runtimeState; /* * List of tags that can be used for describing the trigger. */ - @JsonProperty(value = "annotations") private List annotations; /* * Azure data factory nested object which contains information about creating pipeline run */ - @JsonIgnore private Map additionalProperties; /** @@ -105,6 +89,18 @@ public TriggerRuntimeState runtimeState() { return this.runtimeState; } + /** + * Set the runtimeState property: Indicates if trigger is running or not. 
Updated when Start/Stop APIs are called on + * the Trigger. + * + * @param runtimeState the runtimeState value to set. + * @return the Trigger object itself. + */ + Trigger withRuntimeState(TriggerRuntimeState runtimeState) { + this.runtimeState = runtimeState; + return this; + } + /** * Get the annotations property: List of tags that can be used for describing the trigger. * @@ -131,7 +127,6 @@ public Trigger withAnnotations(List annotations) { * * @return the additionalProperties value. */ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -148,14 +143,6 @@ public Trigger withAdditionalProperties(Map additionalProperties return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -163,4 +150,99 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("description", this.description); + jsonWriter.writeArrayField("annotations", this.annotations, (writer, element) -> writer.writeUntyped(element)); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of Trigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of Trigger if the JsonReader was pointing to an instance of it, or null if it was pointing to + * JSON null. + * @throws IOException If an error occurs while reading the Trigger. 
+ */ + public static Trigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("MultiplePipelineTrigger".equals(discriminatorValue)) { + return MultiplePipelineTrigger.fromJsonKnownDiscriminator(readerToUse.reset()); + } else if ("ScheduleTrigger".equals(discriminatorValue)) { + return ScheduleTrigger.fromJson(readerToUse.reset()); + } else if ("BlobTrigger".equals(discriminatorValue)) { + return BlobTrigger.fromJson(readerToUse.reset()); + } else if ("BlobEventsTrigger".equals(discriminatorValue)) { + return BlobEventsTrigger.fromJson(readerToUse.reset()); + } else if ("CustomEventsTrigger".equals(discriminatorValue)) { + return CustomEventsTrigger.fromJson(readerToUse.reset()); + } else if ("TumblingWindowTrigger".equals(discriminatorValue)) { + return TumblingWindowTrigger.fromJson(readerToUse.reset()); + } else if ("RerunTumblingWindowTrigger".equals(discriminatorValue)) { + return RerunTumblingWindowTrigger.fromJson(readerToUse.reset()); + } else if ("ChainingTrigger".equals(discriminatorValue)) { + return ChainingTrigger.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static Trigger fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + Trigger deserializedTrigger = new Trigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String 
fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedTrigger.type = reader.getString(); + } else if ("description".equals(fieldName)) { + deserializedTrigger.description = reader.getString(); + } else if ("runtimeState".equals(fieldName)) { + deserializedTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTrigger.annotations = annotations; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTrigger.additionalProperties = additionalProperties; + + return deserializedTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java index 9bc7451be39c..1ede521f42e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerDependencyReference.java @@ -6,38 +6,24 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Trigger 
referenced dependency. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = TriggerDependencyReference.class, - visible = true) -@JsonTypeName("TriggerDependencyReference") -@JsonSubTypes({ - @JsonSubTypes.Type( - name = "TumblingWindowTriggerDependencyReference", - value = TumblingWindowTriggerDependencyReference.class) }) @Fluent public class TriggerDependencyReference extends DependencyReference { /* * The type of dependency reference. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TriggerDependencyReference"; /* * Referenced trigger. */ - @JsonProperty(value = "referenceTrigger", required = true) private TriggerReference referenceTrigger; /** @@ -94,4 +80,69 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerDependencyReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("referenceTrigger", this.referenceTrigger); + jsonWriter.writeStringField("type", this.type); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerDependencyReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerDependencyReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerDependencyReference. 
+ */ + public static TriggerDependencyReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("type".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("TumblingWindowTriggerDependencyReference".equals(discriminatorValue)) { + return TumblingWindowTriggerDependencyReference.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static TriggerDependencyReference fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerDependencyReference deserializedTriggerDependencyReference = new TriggerDependencyReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceTrigger".equals(fieldName)) { + deserializedTriggerDependencyReference.referenceTrigger = TriggerReference.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedTriggerDependencyReference.type = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerDependencyReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java index 43e6bb5828d4..7c19a33b283f 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerFilterParameters.java @@ -5,23 +5,25 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Query parameters for triggers. */ @Fluent -public final class TriggerFilterParameters { +public final class TriggerFilterParameters implements JsonSerializable { /* * The continuation token for getting the next page of results. Null for first page. */ - @JsonProperty(value = "continuationToken") private String continuationToken; /* * The name of the parent TumblingWindowTrigger to get the child rerun triggers */ - @JsonProperty(value = "parentTriggerName") private String parentTriggerName; /** @@ -79,4 +81,43 @@ public TriggerFilterParameters withParentTriggerName(String parentTriggerName) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("continuationToken", this.continuationToken); + jsonWriter.writeStringField("parentTriggerName", this.parentTriggerName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerFilterParameters from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerFilterParameters if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the TriggerFilterParameters. 
+ */ + public static TriggerFilterParameters fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerFilterParameters deserializedTriggerFilterParameters = new TriggerFilterParameters(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("continuationToken".equals(fieldName)) { + deserializedTriggerFilterParameters.continuationToken = reader.getString(); + } else if ("parentTriggerName".equals(fieldName)) { + deserializedTriggerFilterParameters.parentTriggerName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerFilterParameters; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java index 659ab5fd3514..692da1f2f675 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerListResponse.java @@ -6,25 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TriggerResourceInner; -import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; import java.util.List; /** * A list of trigger resources. */ @Fluent -public final class TriggerListResponse { +public final class TriggerListResponse implements JsonSerializable { /* * List of triggers. 
*/ - @JsonProperty(value = "value", required = true) private List value; /* * The link to the next page of results, if any remaining results exist. */ - @JsonProperty(value = "nextLink") private String nextLink; /** @@ -88,4 +90,46 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerListResponse.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerListResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerListResponse if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerListResponse. 
+ */ + public static TriggerListResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerListResponse deserializedTriggerListResponse = new TriggerListResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> TriggerResourceInner.fromJson(reader1)); + deserializedTriggerListResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedTriggerListResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerListResponse; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerPipelineReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerPipelineReference.java index 1d9e0f2c7fd3..623cfbbf8821 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerPipelineReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerPipelineReference.java @@ -5,26 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.util.Map; /** * Pipeline that needs to be triggered with the given parameters. */ @Fluent -public final class TriggerPipelineReference { +public final class TriggerPipelineReference implements JsonSerializable { /* * Pipeline reference. 
*/ - @JsonProperty(value = "pipelineReference") private PipelineReference pipelineReference; /* * Pipeline parameters. */ - @JsonProperty(value = "parameters") - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map parameters; /** @@ -83,4 +83,44 @@ public void validate() { pipelineReference().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("pipelineReference", this.pipelineReference); + jsonWriter.writeMapField("parameters", this.parameters, (writer, element) -> writer.writeUntyped(element)); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerPipelineReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerPipelineReference if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the TriggerPipelineReference. 
+ */ + public static TriggerPipelineReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerPipelineReference deserializedTriggerPipelineReference = new TriggerPipelineReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("pipelineReference".equals(fieldName)) { + deserializedTriggerPipelineReference.pipelineReference = PipelineReference.fromJson(reader); + } else if ("parameters".equals(fieldName)) { + Map parameters = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedTriggerPipelineReference.parameters = parameters; + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerPipelineReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java index 9d7a076ccaa8..a0813c6f5654 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReference.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Trigger reference type. */ @Fluent -public final class TriggerReference { +public final class TriggerReference implements JsonSerializable { /* * Trigger reference type. 
*/ - @JsonProperty(value = "type", required = true) private TriggerReferenceType type; /* * Reference trigger name. */ - @JsonProperty(value = "referenceName", required = true) private String referenceName; /** @@ -88,4 +90,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TriggerReference.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? null : this.type.toString()); + jsonWriter.writeStringField("referenceName", this.referenceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerReference if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TriggerReference. 
+ */ + public static TriggerReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerReference deserializedTriggerReference = new TriggerReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedTriggerReference.type = TriggerReferenceType.fromString(reader.getString()); + } else if ("referenceName".equals(fieldName)) { + deserializedTriggerReference.referenceName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTriggerReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReferenceType.java index bc822c6f88db..34972f4798c1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReferenceType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerReferenceType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public TriggerReferenceType() { * @param name a name to look for. * @return the corresponding TriggerReferenceType. 
*/ - @JsonCreator public static TriggerReferenceType fromString(String name) { return fromString(name, TriggerReferenceType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRun.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRun.java index f65550ec9891..e2c770560b87 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRun.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRun.java @@ -5,11 +5,12 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.core.util.CoreUtils; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; import java.time.OffsetDateTime; import java.util.LinkedHashMap; import java.util.Map; @@ -18,75 +19,60 @@ * Trigger runs. */ @Fluent -public final class TriggerRun { +public final class TriggerRun implements JsonSerializable { /* * Trigger run id. */ - @JsonProperty(value = "triggerRunId", access = JsonProperty.Access.WRITE_ONLY) private String triggerRunId; /* * Trigger name. */ - @JsonProperty(value = "triggerName", access = JsonProperty.Access.WRITE_ONLY) private String triggerName; /* * Trigger type. */ - @JsonProperty(value = "triggerType", access = JsonProperty.Access.WRITE_ONLY) private String triggerType; /* * Trigger run start time. 
*/ - @JsonProperty(value = "triggerRunTimestamp", access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime triggerRunTimestamp; /* * Trigger run status. */ - @JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY) private TriggerRunStatus status; /* * Trigger error message. */ - @JsonProperty(value = "message", access = JsonProperty.Access.WRITE_ONLY) private String message; /* * List of property name and value related to trigger run. Name, value pair depends on type of trigger. */ - @JsonProperty(value = "properties", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map properties; /* * List of pipeline name and run Id triggered by the trigger run. */ - @JsonProperty(value = "triggeredPipelines", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map triggeredPipelines; /* * Run dimension for which trigger was fired. */ - @JsonProperty(value = "runDimension", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map runDimension; /* * Status of the upstream pipelines. */ - @JsonProperty(value = "dependencyStatus", access = JsonProperty.Access.WRITE_ONLY) - @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map dependencyStatus; /* * Trigger runs. */ - @JsonIgnore private Map additionalProperties; /** @@ -191,7 +177,6 @@ public Map dependencyStatus() { * * @return the additionalProperties value. 
*/ - @JsonAnyGetter public Map additionalProperties() { return this.additionalProperties; } @@ -207,14 +192,6 @@ public TriggerRun withAdditionalProperties(Map additionalPropert return this; } - @JsonAnySetter - void withAdditionalProperties(String key, Object value) { - if (additionalProperties == null) { - additionalProperties = new LinkedHashMap<>(); - } - additionalProperties.put(key, value); - } - /** * Validates the instance. * @@ -222,4 +199,73 @@ void withAdditionalProperties(String key, Object value) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + if (additionalProperties != null) { + for (Map.Entry additionalProperty : additionalProperties.entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TriggerRun from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TriggerRun if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the TriggerRun. 
+ */ + public static TriggerRun fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TriggerRun deserializedTriggerRun = new TriggerRun(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("triggerRunId".equals(fieldName)) { + deserializedTriggerRun.triggerRunId = reader.getString(); + } else if ("triggerName".equals(fieldName)) { + deserializedTriggerRun.triggerName = reader.getString(); + } else if ("triggerType".equals(fieldName)) { + deserializedTriggerRun.triggerType = reader.getString(); + } else if ("triggerRunTimestamp".equals(fieldName)) { + deserializedTriggerRun.triggerRunTimestamp = reader + .getNullable(nonNullReader -> CoreUtils.parseBestOffsetDateTime(nonNullReader.getString())); + } else if ("status".equals(fieldName)) { + deserializedTriggerRun.status = TriggerRunStatus.fromString(reader.getString()); + } else if ("message".equals(fieldName)) { + deserializedTriggerRun.message = reader.getString(); + } else if ("properties".equals(fieldName)) { + Map properties = reader.readMap(reader1 -> reader1.getString()); + deserializedTriggerRun.properties = properties; + } else if ("triggeredPipelines".equals(fieldName)) { + Map triggeredPipelines = reader.readMap(reader1 -> reader1.getString()); + deserializedTriggerRun.triggeredPipelines = triggeredPipelines; + } else if ("runDimension".equals(fieldName)) { + Map runDimension = reader.readMap(reader1 -> reader1.getString()); + deserializedTriggerRun.runDimension = runDimension; + } else if ("dependencyStatus".equals(fieldName)) { + Map dependencyStatus = reader.readMap(reader1 -> reader1.readUntyped()); + deserializedTriggerRun.dependencyStatus = dependencyStatus; + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedTriggerRun.additionalProperties = additionalProperties; + + return deserializedTriggerRun; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRunStatus.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRunStatus.java index f1540290569a..459d888a2be6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRunStatus.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRunStatus.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public TriggerRunStatus() { * @param name a name to look for. * @return the corresponding TriggerRunStatus. 
*/ - @JsonCreator public static TriggerRunStatus fromString(String name) { return fromString(name, TriggerRunStatus.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRuntimeState.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRuntimeState.java index 0278f65e03ca..e3aa60d54058 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRuntimeState.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TriggerRuntimeState.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public TriggerRuntimeState() { * @param name a name to look for. * @return the corresponding TriggerRuntimeState. 
*/ - @JsonCreator public static TriggerRuntimeState fromString(String name) { return fromString(name, TriggerRuntimeState.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowFrequency.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowFrequency.java index a0ffb8375cfb..f9eb2e2bbc9e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowFrequency.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowFrequency.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public TumblingWindowFrequency() { * @param name a name to look for. * @return the corresponding TumblingWindowFrequency. 
*/ - @JsonCreator public static TumblingWindowFrequency fromString(String name) { return fromString(name, TumblingWindowFrequency.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java index a9e2754892c1..4130a8039300 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTrigger.java @@ -6,41 +6,42 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TumblingWindowTriggerTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.time.OffsetDateTime; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also * supports backfill scenarios (when start time is in the past). */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TumblingWindowTrigger.class, visible = true) -@JsonTypeName("TumblingWindowTrigger") @Fluent public final class TumblingWindowTrigger extends Trigger { /* * Trigger type. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TumblingWindowTrigger"; /* * Pipeline for which runs are created when an event is fired for trigger window that is ready. */ - @JsonProperty(value = "pipeline", required = true) private TriggerPipelineReference pipeline; /* * Tumbling Window Trigger properties. */ - @JsonProperty(value = "typeProperties", required = true) private TumblingWindowTriggerTypeProperties innerTypeProperties = new TumblingWindowTriggerTypeProperties(); + /* + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + */ + private TriggerRuntimeState runtimeState; + /** * Creates an instance of TumblingWindowTrigger class. */ @@ -88,6 +89,17 @@ private TumblingWindowTriggerTypeProperties innerTypeProperties() { return this.innerTypeProperties; } + /** + * Get the runtimeState property: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on + * the Trigger. + * + * @return the runtimeState value. 
+ */ + @Override + public TriggerRuntimeState runtimeState() { + return this.runtimeState; + } + /** * {@inheritDoc} */ @@ -324,4 +336,68 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TumblingWindowTrigger.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("pipeline", this.pipeline); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TumblingWindowTrigger from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TumblingWindowTrigger if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TumblingWindowTrigger. 
+ */ + public static TumblingWindowTrigger fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TumblingWindowTrigger deserializedTumblingWindowTrigger = new TumblingWindowTrigger(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedTumblingWindowTrigger.withDescription(reader.getString()); + } else if ("runtimeState".equals(fieldName)) { + deserializedTumblingWindowTrigger.runtimeState = TriggerRuntimeState.fromString(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTumblingWindowTrigger.withAnnotations(annotations); + } else if ("pipeline".equals(fieldName)) { + deserializedTumblingWindowTrigger.pipeline = TriggerPipelineReference.fromJson(reader); + } else if ("typeProperties".equals(fieldName)) { + deserializedTumblingWindowTrigger.innerTypeProperties + = TumblingWindowTriggerTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedTumblingWindowTrigger.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTumblingWindowTrigger.withAdditionalProperties(additionalProperties); + + return deserializedTumblingWindowTrigger; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java index 4b3481ad7012..5a8412e2083e 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TumblingWindowTriggerDependencyReference.java @@ -5,40 +5,30 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Referenced tumbling window trigger dependency. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "type", - defaultImpl = TumblingWindowTriggerDependencyReference.class, - visible = true) -@JsonTypeName("TumblingWindowTriggerDependencyReference") @Fluent public final class TumblingWindowTriggerDependencyReference extends TriggerDependencyReference { /* * The type of dependency reference. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "TumblingWindowTriggerDependencyReference"; /* * Timespan applied to the start time of a tumbling window when evaluating dependency. */ - @JsonProperty(value = "offset") private String offset; /* * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be * used. 
*/ - @JsonProperty(value = "size") private String size; /** @@ -117,4 +107,52 @@ public TumblingWindowTriggerDependencyReference withReferenceTrigger(TriggerRefe public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("referenceTrigger", referenceTrigger()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeStringField("offset", this.offset); + jsonWriter.writeStringField("size", this.size); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TumblingWindowTriggerDependencyReference from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TumblingWindowTriggerDependencyReference if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TumblingWindowTriggerDependencyReference. 
+ */ + public static TumblingWindowTriggerDependencyReference fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TumblingWindowTriggerDependencyReference deserializedTumblingWindowTriggerDependencyReference + = new TumblingWindowTriggerDependencyReference(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("referenceTrigger".equals(fieldName)) { + deserializedTumblingWindowTriggerDependencyReference + .withReferenceTrigger(TriggerReference.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedTumblingWindowTriggerDependencyReference.type = reader.getString(); + } else if ("offset".equals(fieldName)) { + deserializedTumblingWindowTriggerDependencyReference.offset = reader.getString(); + } else if ("size".equals(fieldName)) { + deserializedTumblingWindowTriggerDependencyReference.size = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTumblingWindowTriggerDependencyReference; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java index a53bd200e0bf..4d763b93c121 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TwilioLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.TwilioLinkedServiceTypeProperties; 
-import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Linked service for Twilio. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = TwilioLinkedService.class, visible = true) -@JsonTypeName("Twilio") @Fluent public final class TwilioLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Twilio"; /* * Twilio linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private TwilioLinkedServiceTypeProperties innerTypeProperties = new TwilioLinkedServiceTypeProperties(); /** @@ -161,4 +157,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(TwilioLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TwilioLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of TwilioLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TwilioLinkedService. + */ + public static TwilioLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TwilioLinkedService deserializedTwilioLinkedService = new TwilioLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedTwilioLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedTwilioLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedTwilioLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedTwilioLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedTwilioLinkedService.innerTypeProperties + = TwilioLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedTwilioLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedTwilioLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedTwilioLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java index 099889599fb8..d3a152c74123 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/TypeConversionSettings.java @@ -5,47 +5,45 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Type conversion settings. */ @Fluent -public final class TypeConversionSettings { +public final class TypeConversionSettings implements JsonSerializable { /* * Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "allowDataTruncation") private Object allowDataTruncation; /* * Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). */ - @JsonProperty(value = "treatBooleanAsNumber") private Object treatBooleanAsNumber; /* * The format for DateTime values. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "dateTimeFormat") private Object dateTimeFormat; /* * The format for DateTimeOffset values. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "dateTimeOffsetFormat") private Object dateTimeOffsetFormat; /* * The format for TimeSpan values. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "timeSpanFormat") private Object timeSpanFormat; /* * The culture used to convert data from/to string. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "culture") private Object culture; /** @@ -193,4 +191,55 @@ public TypeConversionSettings withCulture(Object culture) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("allowDataTruncation", this.allowDataTruncation); + jsonWriter.writeUntypedField("treatBooleanAsNumber", this.treatBooleanAsNumber); + jsonWriter.writeUntypedField("dateTimeFormat", this.dateTimeFormat); + jsonWriter.writeUntypedField("dateTimeOffsetFormat", this.dateTimeOffsetFormat); + jsonWriter.writeUntypedField("timeSpanFormat", this.timeSpanFormat); + jsonWriter.writeUntypedField("culture", this.culture); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TypeConversionSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TypeConversionSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the TypeConversionSettings. 
+ */ + public static TypeConversionSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TypeConversionSettings deserializedTypeConversionSettings = new TypeConversionSettings(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("allowDataTruncation".equals(fieldName)) { + deserializedTypeConversionSettings.allowDataTruncation = reader.readUntyped(); + } else if ("treatBooleanAsNumber".equals(fieldName)) { + deserializedTypeConversionSettings.treatBooleanAsNumber = reader.readUntyped(); + } else if ("dateTimeFormat".equals(fieldName)) { + deserializedTypeConversionSettings.dateTimeFormat = reader.readUntyped(); + } else if ("dateTimeOffsetFormat".equals(fieldName)) { + deserializedTypeConversionSettings.dateTimeOffsetFormat = reader.readUntyped(); + } else if ("timeSpanFormat".equals(fieldName)) { + deserializedTypeConversionSettings.timeSpanFormat = reader.readUntyped(); + } else if ("culture".equals(fieldName)) { + deserializedTypeConversionSettings.culture = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedTypeConversionSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java index 03a52e8fe14b..c48b99339e3a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UntilActivity.java @@ -6,32 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import 
com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.UntilActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity executes inner activities until the specified boolean expression results to true or timeout is reached, * whichever is earlier. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = UntilActivity.class, visible = true) -@JsonTypeName("Until") @Fluent public final class UntilActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Until"; /* * Until activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private UntilActivityTypeProperties innerTypeProperties = new UntilActivityTypeProperties(); /** @@ -206,4 +203,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(UntilActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? 
null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UntilActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UntilActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the UntilActivity. 
+ */ + public static UntilActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UntilActivity deserializedUntilActivity = new UntilActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedUntilActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedUntilActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedUntilActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedUntilActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedUntilActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedUntilActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedUntilActivity.innerTypeProperties = UntilActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedUntilActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedUntilActivity.withAdditionalProperties(additionalProperties); + + return deserializedUntilActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java index edaea3f24e2e..96ae56acb5d6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeNodeRequest.java @@ -5,18 +5,22 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Update integration runtime node request. */ @Fluent -public final class UpdateIntegrationRuntimeNodeRequest { +public final class UpdateIntegrationRuntimeNodeRequest + implements JsonSerializable { /* * The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and * maxConcurrentJobs(inclusive) are allowed. */ - @JsonProperty(value = "concurrentJobsLimit") private Integer concurrentJobsLimit; /** @@ -54,4 +58,42 @@ public UpdateIntegrationRuntimeNodeRequest withConcurrentJobsLimit(Integer concu */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeNumberField("concurrentJobsLimit", this.concurrentJobsLimit); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UpdateIntegrationRuntimeNodeRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UpdateIntegrationRuntimeNodeRequest if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. 
+ * @throws IOException If an error occurs while reading the UpdateIntegrationRuntimeNodeRequest. + */ + public static UpdateIntegrationRuntimeNodeRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UpdateIntegrationRuntimeNodeRequest deserializedUpdateIntegrationRuntimeNodeRequest + = new UpdateIntegrationRuntimeNodeRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("concurrentJobsLimit".equals(fieldName)) { + deserializedUpdateIntegrationRuntimeNodeRequest.concurrentJobsLimit + = reader.getNullable(JsonReader::getInt); + } else { + reader.skipChildren(); + } + } + + return deserializedUpdateIntegrationRuntimeNodeRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java index dad1fb5ef482..e9f63344116d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UpdateIntegrationRuntimeRequest.java @@ -5,25 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Update integration runtime request. 
*/ @Fluent -public final class UpdateIntegrationRuntimeRequest { +public final class UpdateIntegrationRuntimeRequest implements JsonSerializable { /* * Enables or disables the auto-update feature of the self-hosted integration runtime. See * https://go.microsoft.com/fwlink/?linkid=854189. */ - @JsonProperty(value = "autoUpdate") private IntegrationRuntimeAutoUpdate autoUpdate; /* * The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on * that time. */ - @JsonProperty(value = "updateDelayOffset") private String updateDelayOffset; /** @@ -83,4 +85,45 @@ public UpdateIntegrationRuntimeRequest withUpdateDelayOffset(String updateDelayO */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("autoUpdate", this.autoUpdate == null ? null : this.autoUpdate.toString()); + jsonWriter.writeStringField("updateDelayOffset", this.updateDelayOffset); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UpdateIntegrationRuntimeRequest from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UpdateIntegrationRuntimeRequest if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the UpdateIntegrationRuntimeRequest. 
+ */ + public static UpdateIntegrationRuntimeRequest fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UpdateIntegrationRuntimeRequest deserializedUpdateIntegrationRuntimeRequest + = new UpdateIntegrationRuntimeRequest(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("autoUpdate".equals(fieldName)) { + deserializedUpdateIntegrationRuntimeRequest.autoUpdate + = IntegrationRuntimeAutoUpdate.fromString(reader.getString()); + } else if ("updateDelayOffset".equals(fieldName)) { + deserializedUpdateIntegrationRuntimeRequest.updateDelayOffset = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedUpdateIntegrationRuntimeRequest; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java index 2e4a59487e0b..7d9c613dc9da 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserAccessPolicy.java @@ -5,43 +5,42 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Get Data Plane read only token request definition. */ @Fluent -public final class UserAccessPolicy { +public final class UserAccessPolicy implements JsonSerializable { /* * The string with permissions for Data Plane access. 
Currently only 'r' is supported which grants read only access. */ - @JsonProperty(value = "permissions") private String permissions; /* * The resource path to get access relative to factory. Currently only empty string is supported which corresponds * to the factory resource. */ - @JsonProperty(value = "accessResourcePath") private String accessResourcePath; /* * The name of the profile. Currently only the default is supported. The default value is DefaultProfile. */ - @JsonProperty(value = "profileName") private String profileName; /* * Start time for the token. If not specified the current time will be used. */ - @JsonProperty(value = "startTime") private String startTime; /* * Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire * in eight hours. */ - @JsonProperty(value = "expireTime") private String expireTime; /** @@ -165,4 +164,52 @@ public UserAccessPolicy withExpireTime(String expireTime) { */ public void validate() { } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("permissions", this.permissions); + jsonWriter.writeStringField("accessResourcePath", this.accessResourcePath); + jsonWriter.writeStringField("profileName", this.profileName); + jsonWriter.writeStringField("startTime", this.startTime); + jsonWriter.writeStringField("expireTime", this.expireTime); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UserAccessPolicy from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UserAccessPolicy if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the UserAccessPolicy. 
+ */ + public static UserAccessPolicy fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UserAccessPolicy deserializedUserAccessPolicy = new UserAccessPolicy(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("permissions".equals(fieldName)) { + deserializedUserAccessPolicy.permissions = reader.getString(); + } else if ("accessResourcePath".equals(fieldName)) { + deserializedUserAccessPolicy.accessResourcePath = reader.getString(); + } else if ("profileName".equals(fieldName)) { + deserializedUserAccessPolicy.profileName = reader.getString(); + } else if ("startTime".equals(fieldName)) { + deserializedUserAccessPolicy.startTime = reader.getString(); + } else if ("expireTime".equals(fieldName)) { + deserializedUserAccessPolicy.expireTime = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedUserAccessPolicy; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java index 427adcec52e4..01ecf062d5e6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/UserProperty.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * User property. 
*/ @Fluent -public final class UserProperty { +public final class UserProperty implements JsonSerializable { /* * User property name. */ - @JsonProperty(value = "name", required = true) private String name; /* * User property value. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "value", required = true) private Object value; /** @@ -88,4 +90,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(UserProperty.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", this.name); + jsonWriter.writeUntypedField("value", this.value); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of UserProperty from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of UserProperty if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the UserProperty. 
+ */ + public static UserProperty fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + UserProperty deserializedUserProperty = new UserProperty(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedUserProperty.name = reader.getString(); + } else if ("value".equals(fieldName)) { + deserializedUserProperty.value = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedUserProperty; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java index 6e93f4b00f64..14f3d1bbd1e2 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ValidationActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ValidationActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity verifies that an external resource exists. 
*/ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ValidationActivity.class, visible = true) -@JsonTypeName("Validation") @Fluent public final class ValidationActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Validation"; /* * Validation activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private ValidationActivityTypeProperties innerTypeProperties = new ValidationActivityTypeProperties(); /** @@ -255,4 +252,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ValidationActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ValidationActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ValidationActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ValidationActivity. + */ + public static ValidationActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ValidationActivity deserializedValidationActivity = new ValidationActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedValidationActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedValidationActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedValidationActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedValidationActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedValidationActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedValidationActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedValidationActivity.innerTypeProperties + = ValidationActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedValidationActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + 
additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedValidationActivity.withAdditionalProperties(additionalProperties); + + return deserializedValidationActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java index a16b90eaca06..97d54131ef6f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableSpecification.java @@ -6,23 +6,25 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Definition of a single variable for a Pipeline. */ @Fluent -public final class VariableSpecification { +public final class VariableSpecification implements JsonSerializable { /* * Variable type. */ - @JsonProperty(value = "type", required = true) private VariableType type; /* * Default value of variable. */ - @JsonProperty(value = "defaultValue") private Object defaultValue; /** @@ -84,4 +86,44 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(VariableSpecification.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type == null ? 
null : this.type.toString()); + jsonWriter.writeUntypedField("defaultValue", this.defaultValue); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VariableSpecification from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VariableSpecification if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the VariableSpecification. + */ + public static VariableSpecification fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VariableSpecification deserializedVariableSpecification = new VariableSpecification(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedVariableSpecification.type = VariableType.fromString(reader.getString()); + } else if ("defaultValue".equals(fieldName)) { + deserializedVariableSpecification.defaultValue = reader.readUntyped(); + } else { + reader.skipChildren(); + } + } + + return deserializedVariableSpecification; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableType.java index 4e03e77a3b53..f1e5ca47ba2d 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VariableType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; 
-import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public VariableType() { * @param name a name to look for. * @return the corresponding VariableType. */ - @JsonCreator public static VariableType fromString(String name) { return fromString(name, VariableType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java index 858deb07b786..6d51eb26bbeb 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.VerticaLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Vertica linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaLinkedService.class, visible = true) -@JsonTypeName("Vertica") @Fluent public final class VerticaLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Vertica"; /* * Vertica linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private VerticaLinkedServiceTypeProperties innerTypeProperties = new VerticaLinkedServiceTypeProperties(); /** @@ -186,4 +182,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(VerticaLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VerticaLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VerticaLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the VerticaLinkedService. 
+ */ + public static VerticaLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VerticaLinkedService deserializedVerticaLinkedService = new VerticaLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedVerticaLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedVerticaLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedVerticaLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedVerticaLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedVerticaLinkedService.innerTypeProperties + = VerticaLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedVerticaLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedVerticaLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedVerticaLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java index af7d186a5743..48a473a9829c 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Vertica source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaSource.class, visible = true) -@JsonTypeName("VerticaSource") @Fluent public final class VerticaSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "VerticaSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public VerticaSource withDisableMetricsCollection(Object disableMetricsCollectio public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VerticaSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VerticaSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the VerticaSource. 
+ */ + public static VerticaSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VerticaSource deserializedVerticaSource = new VerticaSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedVerticaSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedVerticaSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedVerticaSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedVerticaSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedVerticaSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedVerticaSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedVerticaSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedVerticaSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedVerticaSource.withAdditionalProperties(additionalProperties); + + return deserializedVerticaSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java index 21280c66e604..3cf723169a77 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/VerticaTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.VerticaDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Vertica dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = VerticaTableDataset.class, visible = true) -@JsonTypeName("VerticaTable") @Fluent public final class VerticaTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "VerticaTable"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private VerticaDatasetTypeProperties innerTypeProperties; /** @@ -204,4 +200,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of VerticaTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of VerticaTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the VerticaTableDataset. 
+ */ + public static VerticaTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + VerticaTableDataset deserializedVerticaTableDataset = new VerticaTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedVerticaTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedVerticaTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedVerticaTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedVerticaTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedVerticaTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedVerticaTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedVerticaTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedVerticaTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedVerticaTableDataset.innerTypeProperties = VerticaDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedVerticaTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedVerticaTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java index 925479493a45..5fbed09e1bce 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WaitActivity.java @@ -6,31 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WaitActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; /** * This activity suspends pipeline execution for the specified interval. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WaitActivity.class, visible = true) -@JsonTypeName("Wait") @Fluent public final class WaitActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Wait"; /* * Wait activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private WaitActivityTypeProperties innerTypeProperties = new WaitActivityTypeProperties(); /** @@ -153,4 +150,78 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WaitActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WaitActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WaitActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WaitActivity. 
+ */ + public static WaitActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WaitActivity deserializedWaitActivity = new WaitActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedWaitActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedWaitActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedWaitActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedWaitActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedWaitActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedWaitActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedWaitActivity.innerTypeProperties = WaitActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWaitActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWaitActivity.withAdditionalProperties(additionalProperties); + + return deserializedWaitActivity; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java index c1045e89f4dd..e940804ab001 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WarehouseLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Fabric Warehouse linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseLinkedService.class, visible = true) -@JsonTypeName("Warehouse") @Fluent public final class WarehouseLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Warehouse"; /* * Microsoft Fabric Warehouse linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private WarehouseLinkedServiceTypeProperties innerTypeProperties = new WarehouseLinkedServiceTypeProperties(); /** @@ -344,4 +340,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WarehouseLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WarehouseLinkedService. 
+ */ + public static WarehouseLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseLinkedService deserializedWarehouseLinkedService = new WarehouseLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedWarehouseLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedWarehouseLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedWarehouseLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedWarehouseLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedWarehouseLinkedService.innerTypeProperties + = WarehouseLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWarehouseLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWarehouseLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedWarehouseLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java index 3471a019554a..d8d4b87f26a2 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSink.java @@ -5,56 +5,49 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Microsoft Fabric Warehouse sink. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseSink.class, visible = true) -@JsonTypeName("WarehouseSink") @Fluent public final class WarehouseSink extends CopySink { /* * Copy sink type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WarehouseSink"; /* * SQL pre-copy script. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "preCopyScript") private Object preCopyScript; /* * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "allowCopyCommand") private Object allowCopyCommand; /* * Specifies Copy Command related settings when allowCopyCommand is true. */ - @JsonProperty(value = "copyCommandSettings") private DWCopyCommandSettings copyCommandSettings; /* * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string * (or Expression with resultType string). 
*/ - @JsonProperty(value = "tableOption") private Object tableOption; /* * Write behavior when copying data into azure Microsoft Fabric Data Warehouse. Type: DWWriteBehaviorEnum (or * Expression with resultType DWWriteBehaviorEnum) */ - @JsonProperty(value = "writeBehavior") private Object writeBehavior; /** @@ -245,4 +238,84 @@ public void validate() { copyCommandSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("writeBatchSize", writeBatchSize()); + jsonWriter.writeUntypedField("writeBatchTimeout", writeBatchTimeout()); + jsonWriter.writeUntypedField("sinkRetryCount", sinkRetryCount()); + jsonWriter.writeUntypedField("sinkRetryWait", sinkRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preCopyScript", this.preCopyScript); + jsonWriter.writeUntypedField("allowCopyCommand", this.allowCopyCommand); + jsonWriter.writeJsonField("copyCommandSettings", this.copyCommandSettings); + jsonWriter.writeUntypedField("tableOption", this.tableOption); + jsonWriter.writeUntypedField("writeBehavior", this.writeBehavior); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseSink from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseSink if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the WarehouseSink. 
+ */ + public static WarehouseSink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseSink deserializedWarehouseSink = new WarehouseSink(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("writeBatchSize".equals(fieldName)) { + deserializedWarehouseSink.withWriteBatchSize(reader.readUntyped()); + } else if ("writeBatchTimeout".equals(fieldName)) { + deserializedWarehouseSink.withWriteBatchTimeout(reader.readUntyped()); + } else if ("sinkRetryCount".equals(fieldName)) { + deserializedWarehouseSink.withSinkRetryCount(reader.readUntyped()); + } else if ("sinkRetryWait".equals(fieldName)) { + deserializedWarehouseSink.withSinkRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedWarehouseSink.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedWarehouseSink.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedWarehouseSink.type = reader.getString(); + } else if ("preCopyScript".equals(fieldName)) { + deserializedWarehouseSink.preCopyScript = reader.readUntyped(); + } else if ("allowCopyCommand".equals(fieldName)) { + deserializedWarehouseSink.allowCopyCommand = reader.readUntyped(); + } else if ("copyCommandSettings".equals(fieldName)) { + deserializedWarehouseSink.copyCommandSettings = DWCopyCommandSettings.fromJson(reader); + } else if ("tableOption".equals(fieldName)) { + deserializedWarehouseSink.tableOption = reader.readUntyped(); + } else if ("writeBehavior".equals(fieldName)) { + deserializedWarehouseSink.writeBehavior = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + 
deserializedWarehouseSink.withAdditionalProperties(additionalProperties); + + return deserializedWarehouseSink; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java index 6c32c5b77e0c..fe02ef213217 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseSource.java @@ -5,43 +5,38 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Microsoft Fabric Warehouse source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseSource.class, visible = true) -@JsonTypeName("WarehouseSource") @Fluent public final class WarehouseSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WarehouseSource"; /* * Microsoft Fabric Warehouse reader query. Type: string (or Expression with resultType string). */ - @JsonProperty(value = "sqlReaderQuery") private Object sqlReaderQuery; /* * Name of the stored procedure for a Microsoft Fabric Warehouse source. This cannot be used at the same time as * SqlReaderQuery. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "sqlReaderStoredProcedureName") private Object sqlReaderStoredProcedureName; /* * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: * object (or Expression with resultType object), itemType: StoredProcedureParameter. */ - @JsonProperty(value = "storedProcedureParameters") private Object storedProcedureParameters; /* @@ -49,20 +44,17 @@ public final class WarehouseSource extends TabularSource { * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "isolationLevel") private Object isolationLevel; /* * The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", * "PhysicalPartitionsOfTable", "DynamicRange". */ - @JsonProperty(value = "partitionOption") private Object partitionOption; /* * The settings that will be leveraged for Sql source partitioning. 
*/ - @JsonProperty(value = "partitionSettings") private SqlPartitionSettings partitionSettings; /** @@ -283,4 +275,87 @@ public void validate() { partitionSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("sqlReaderQuery", this.sqlReaderQuery); + jsonWriter.writeUntypedField("sqlReaderStoredProcedureName", this.sqlReaderStoredProcedureName); + jsonWriter.writeUntypedField("storedProcedureParameters", this.storedProcedureParameters); + jsonWriter.writeUntypedField("isolationLevel", this.isolationLevel); + jsonWriter.writeUntypedField("partitionOption", this.partitionOption); + jsonWriter.writeJsonField("partitionSettings", this.partitionSettings); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseSource if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the WarehouseSource. 
+ */ + public static WarehouseSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseSource deserializedWarehouseSource = new WarehouseSource(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedWarehouseSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedWarehouseSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedWarehouseSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedWarehouseSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedWarehouseSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedWarehouseSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedWarehouseSource.type = reader.getString(); + } else if ("sqlReaderQuery".equals(fieldName)) { + deserializedWarehouseSource.sqlReaderQuery = reader.readUntyped(); + } else if ("sqlReaderStoredProcedureName".equals(fieldName)) { + deserializedWarehouseSource.sqlReaderStoredProcedureName = reader.readUntyped(); + } else if ("storedProcedureParameters".equals(fieldName)) { + deserializedWarehouseSource.storedProcedureParameters = reader.readUntyped(); + } else if ("isolationLevel".equals(fieldName)) { + deserializedWarehouseSource.isolationLevel = reader.readUntyped(); + } else if ("partitionOption".equals(fieldName)) { + deserializedWarehouseSource.partitionOption = reader.readUntyped(); + } else if ("partitionSettings".equals(fieldName)) { + deserializedWarehouseSource.partitionSettings = 
SqlPartitionSettings.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWarehouseSource.withAdditionalProperties(additionalProperties); + + return deserializedWarehouseSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java index cc5c653b574c..b4384c3a639e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WarehouseTableDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WarehouseTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Microsoft Fabric Warehouse dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WarehouseTableDataset.class, visible = true) -@JsonTypeName("WarehouseTable") @Fluent public final class WarehouseTableDataset extends Dataset { /* * Type of dataset. 
*/ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WarehouseTable"; /* * Microsoft Fabric Warehouse dataset properties. */ - @JsonProperty(value = "typeProperties") private WarehouseTableDatasetTypeProperties innerTypeProperties; /** @@ -183,4 +179,80 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WarehouseTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WarehouseTableDataset if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WarehouseTableDataset. 
+ */ + public static WarehouseTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WarehouseTableDataset deserializedWarehouseTableDataset = new WarehouseTableDataset(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedWarehouseTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedWarehouseTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedWarehouseTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedWarehouseTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map<String, ParameterSpecification> parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedWarehouseTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List<Object> annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedWarehouseTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedWarehouseTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedWarehouseTableDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedWarehouseTableDataset.innerTypeProperties + = WarehouseTableDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWarehouseTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedWarehouseTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java index b82c6d630890..ac882f406b36 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivity.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WebActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Web activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebActivity.class, visible = true) -@JsonTypeName("WebActivity") @Fluent public final class WebActivity extends ExecutionActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WebActivity"; /* * Web activity properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private WebActivityTypeProperties innerTypeProperties = new WebActivityTypeProperties(); /** @@ -416,4 +412,84 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeJsonField("policy", policy()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebActivity. 
+ */ + public static WebActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebActivity deserializedWebActivity = new WebActivity(); + Map<String, Object> additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedWebActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedWebActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedWebActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedWebActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List<ActivityDependency> dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedWebActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List<UserProperty> userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedWebActivity.withUserProperties(userProperties); + } else if ("linkedServiceName".equals(fieldName)) { + deserializedWebActivity.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("policy".equals(fieldName)) { + deserializedWebActivity.withPolicy(ActivityPolicy.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedWebActivity.innerTypeProperties = WebActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWebActivity.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWebActivity.withAdditionalProperties(additionalProperties); + + return deserializedWebActivity; + }); 
+ } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java index 21a6304817ca..fd77620c7261 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityAuthentication.java @@ -5,56 +5,53 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Web activity authentication properties. */ @Fluent -public final class WebActivityAuthentication { +public final class WebActivityAuthentication implements JsonSerializable<WebActivityAuthentication> { /* * Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal) */ - @JsonProperty(value = "type") private String type; /* * Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal */ - @JsonProperty(value = "pfx") private SecretBase pfx; /* * Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: * string (or Expression with resultType string). */ - @JsonProperty(value = "username") private Object username; /* * Password for the PFX file or basic authentication / Secret when used for ServicePrincipal */ - @JsonProperty(value = "password") private SecretBase password; /* * Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression * with resultType string). 
*/ - @JsonProperty(value = "resource") private Object resource; /* * TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string * (or Expression with resultType string). */ - @JsonProperty(value = "userTenant") private Object userTenant; /* * The credential reference containing authentication information. */ - @JsonProperty(value = "credential") private CredentialReference credential; /** @@ -227,4 +224,58 @@ public void validate() { credential().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("pfx", this.pfx); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeUntypedField("resource", this.resource); + jsonWriter.writeUntypedField("userTenant", this.userTenant); + jsonWriter.writeJsonField("credential", this.credential); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebActivityAuthentication from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebActivityAuthentication if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the WebActivityAuthentication. 
+ */ + public static WebActivityAuthentication fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebActivityAuthentication deserializedWebActivityAuthentication = new WebActivityAuthentication(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedWebActivityAuthentication.type = reader.getString(); + } else if ("pfx".equals(fieldName)) { + deserializedWebActivityAuthentication.pfx = SecretBase.fromJson(reader); + } else if ("username".equals(fieldName)) { + deserializedWebActivityAuthentication.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedWebActivityAuthentication.password = SecretBase.fromJson(reader); + } else if ("resource".equals(fieldName)) { + deserializedWebActivityAuthentication.resource = reader.readUntyped(); + } else if ("userTenant".equals(fieldName)) { + deserializedWebActivityAuthentication.userTenant = reader.readUntyped(); + } else if ("credential".equals(fieldName)) { + deserializedWebActivityAuthentication.credential = CredentialReference.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedWebActivityAuthentication; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityMethod.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityMethod.java index 9b709fd53453..ac652872746a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityMethod.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebActivityMethod.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import 
com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -47,7 +46,6 @@ public WebActivityMethod() { * @param name a name to look for. * @return the corresponding WebActivityMethod. */ - @JsonCreator public static WebActivityMethod fromString(String name) { return fromString(name, WebActivityMethod.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java index d37b395bfaa6..9a291414142e 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAnonymousAuthentication.java @@ -5,27 +5,19 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authenticationType", - defaultImpl = WebAnonymousAuthentication.class, - visible = true) -@JsonTypeName("Anonymous") @Fluent public final class WebAnonymousAuthentication extends WebLinkedServiceTypeProperties { /* * Type of authentication used to connect to the web table source. 
*/ - @JsonTypeId - @JsonProperty(value = "authenticationType", required = true) private WebAuthenticationType authenticationType = WebAuthenticationType.ANONYMOUS; /** @@ -62,4 +54,46 @@ public WebAnonymousAuthentication withUrl(Object url) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", url()); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebAnonymousAuthentication from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebAnonymousAuthentication if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebAnonymousAuthentication. 
+ */ + public static WebAnonymousAuthentication fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebAnonymousAuthentication deserializedWebAnonymousAuthentication = new WebAnonymousAuthentication(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedWebAnonymousAuthentication.withUrl(reader.readUntyped()); + } else if ("authenticationType".equals(fieldName)) { + deserializedWebAnonymousAuthentication.authenticationType + = WebAuthenticationType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedWebAnonymousAuthentication; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAuthenticationType.java index 1dbf2c7ddb65..c61a5b8d4d87 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -42,7 +41,6 @@ public WebAuthenticationType() { * @param name a name to look for. * @return the corresponding WebAuthenticationType. 
*/ - @JsonCreator public static WebAuthenticationType fromString(String name) { return fromString(name, WebAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java index e454d42dff9f..7c67184f52ea 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebBasicAuthentication.java @@ -6,39 +6,29 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authenticationType", - defaultImpl = WebBasicAuthentication.class, - visible = true) -@JsonTypeName("Basic") @Fluent public final class WebBasicAuthentication extends WebLinkedServiceTypeProperties { /* * Type of authentication used to connect to the web table source. */ - @JsonTypeId - @JsonProperty(value = "authenticationType", required = true) private WebAuthenticationType authenticationType = WebAuthenticationType.BASIC; /* * User name for Basic authentication. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "username", required = true) private Object username; /* * The password for Basic authentication. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /** @@ -131,4 +121,52 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebBasicAuthentication.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", url()); + jsonWriter.writeUntypedField("username", this.username); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebBasicAuthentication from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebBasicAuthentication if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebBasicAuthentication. 
+ */ + public static WebBasicAuthentication fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebBasicAuthentication deserializedWebBasicAuthentication = new WebBasicAuthentication(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedWebBasicAuthentication.withUrl(reader.readUntyped()); + } else if ("username".equals(fieldName)) { + deserializedWebBasicAuthentication.username = reader.readUntyped(); + } else if ("password".equals(fieldName)) { + deserializedWebBasicAuthentication.password = SecretBase.fromJson(reader); + } else if ("authenticationType".equals(fieldName)) { + deserializedWebBasicAuthentication.authenticationType + = WebAuthenticationType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedWebBasicAuthentication; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java index 0f6d75cdf03d..cce1ce665299 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebClientCertificateAuthentication.java @@ -6,40 +6,30 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import 
com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This * scheme follows mutual authentication; the server must also provide valid credentials to the client. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authenticationType", - defaultImpl = WebClientCertificateAuthentication.class, - visible = true) -@JsonTypeName("ClientCertificate") @Fluent public final class WebClientCertificateAuthentication extends WebLinkedServiceTypeProperties { /* * Type of authentication used to connect to the web table source. */ - @JsonTypeId - @JsonProperty(value = "authenticationType", required = true) private WebAuthenticationType authenticationType = WebAuthenticationType.CLIENT_CERTIFICATE; /* * Base64-encoded contents of a PFX file. */ - @JsonProperty(value = "pfx", required = true) private SecretBase pfx; /* * Password for the PFX file. */ - @JsonProperty(value = "password", required = true) private SecretBase password; /** @@ -132,4 +122,53 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebClientCertificateAuthentication.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", url()); + jsonWriter.writeJsonField("pfx", this.pfx); + jsonWriter.writeJsonField("password", this.password); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebClientCertificateAuthentication from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of WebClientCertificateAuthentication if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebClientCertificateAuthentication. + */ + public static WebClientCertificateAuthentication fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebClientCertificateAuthentication deserializedWebClientCertificateAuthentication + = new WebClientCertificateAuthentication(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedWebClientCertificateAuthentication.withUrl(reader.readUntyped()); + } else if ("pfx".equals(fieldName)) { + deserializedWebClientCertificateAuthentication.pfx = SecretBase.fromJson(reader); + } else if ("password".equals(fieldName)) { + deserializedWebClientCertificateAuthentication.password = SecretBase.fromJson(reader); + } else if ("authenticationType".equals(fieldName)) { + deserializedWebClientCertificateAuthentication.authenticationType + = WebAuthenticationType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedWebClientCertificateAuthentication; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java index 3a2d1260d959..ada75cdd5481 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java +++ 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedService.java @@ -6,31 +6,27 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Web linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebLinkedService.class, visible = true) -@JsonTypeName("Web") @Fluent public final class WebLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Web"; /* * Web linked service properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private WebLinkedServiceTypeProperties typeProperties; /** @@ -123,4 +119,70 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.typeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebLinkedService. 
+ */ + public static WebLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebLinkedService deserializedWebLinkedService = new WebLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedWebLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedWebLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedWebLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedWebLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedWebLinkedService.typeProperties = WebLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWebLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWebLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedWebLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java index 322e3b414614..a219e056327d 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java @@ -6,33 +6,21 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; /** * Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so * not flattened in SDK models. */ -@JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - property = "authenticationType", - defaultImpl = WebLinkedServiceTypeProperties.class, - visible = true) -@JsonTypeName("WebLinkedServiceTypeProperties") -@JsonSubTypes({ - @JsonSubTypes.Type(name = "Anonymous", value = WebAnonymousAuthentication.class), - @JsonSubTypes.Type(name = "Basic", value = WebBasicAuthentication.class), - @JsonSubTypes.Type(name = "ClientCertificate", value = WebClientCertificateAuthentication.class) }) @Fluent -public class WebLinkedServiceTypeProperties { +public class WebLinkedServiceTypeProperties implements JsonSerializable { /* * Type of authentication used to connect to the web table source. */ - @JsonTypeId - @JsonProperty(value = "authenticationType", required = true) private WebAuthenticationType authenticationType = WebAuthenticationType.fromString("WebLinkedServiceTypeProperties"); @@ -40,7 +28,6 @@ public class WebLinkedServiceTypeProperties { * The URL of the web service endpoint, e.g. 
https://www.microsoft.com . Type: string (or Expression with resultType * string). */ - @JsonProperty(value = "url", required = true) private Object url; /** @@ -94,4 +81,76 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebLinkedServiceTypeProperties.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("url", this.url); + jsonWriter.writeStringField("authenticationType", + this.authenticationType == null ? null : this.authenticationType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebLinkedServiceTypeProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebLinkedServiceTypeProperties if the JsonReader was pointing to an instance of it, or + * null if it was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebLinkedServiceTypeProperties. + */ + public static WebLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("authenticationType".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("Anonymous".equals(discriminatorValue)) { + return WebAnonymousAuthentication.fromJson(readerToUse.reset()); + } else if ("Basic".equals(discriminatorValue)) { + return WebBasicAuthentication.fromJson(readerToUse.reset()); + } else if ("ClientCertificate".equals(discriminatorValue)) { + return WebClientCertificateAuthentication.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static WebLinkedServiceTypeProperties fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebLinkedServiceTypeProperties deserializedWebLinkedServiceTypeProperties + = new WebLinkedServiceTypeProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("url".equals(fieldName)) { + deserializedWebLinkedServiceTypeProperties.url = reader.readUntyped(); + } else if ("authenticationType".equals(fieldName)) { + deserializedWebLinkedServiceTypeProperties.authenticationType + = WebAuthenticationType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedWebLinkedServiceTypeProperties; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java index d447591cdbfd..abad61097164 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebSource.java @@ -5,30 +5,27 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import 
com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity source for web page table. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebSource.class, visible = true) -@JsonTypeName("WebSource") @Fluent public final class WebSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WebSource"; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). */ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -114,4 +111,66 @@ public WebSource withDisableMetricsCollection(Object disableMetricsCollection) { public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebSource 
from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the WebSource. + */ + public static WebSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebSource deserializedWebSource = new WebSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedWebSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedWebSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedWebSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedWebSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedWebSource.type = reader.getString(); + } else if ("additionalColumns".equals(fieldName)) { + deserializedWebSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWebSource.withAdditionalProperties(additionalProperties); + + return deserializedWebSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java index 0a86a00726c2..bbae9bafb125 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebTableDataset.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WebTableDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * The dataset points to a HTML table in the web page. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebTableDataset.class, visible = true) -@JsonTypeName("WebTable") @Fluent public final class WebTableDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WebTable"; /* * Web table dataset properties. 
*/ - @JsonProperty(value = "typeProperties", required = true) private WebTableDatasetTypeProperties innerTypeProperties = new WebTableDatasetTypeProperties(); /** @@ -190,4 +186,79 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebTableDataset.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebTableDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebTableDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebTableDataset. 
+ */ + public static WebTableDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebTableDataset deserializedWebTableDataset = new WebTableDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedWebTableDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedWebTableDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedWebTableDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedWebTableDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedWebTableDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedWebTableDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedWebTableDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("typeProperties".equals(fieldName)) { + deserializedWebTableDataset.innerTypeProperties = WebTableDatasetTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWebTableDataset.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWebTableDataset.withAdditionalProperties(additionalProperties); + + return deserializedWebTableDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java index 6e8a54d0c9a9..d8e7e72a3d0a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivity.java @@ -6,38 +6,33 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.WebhookActivityTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * WebHook activity. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WebhookActivity.class, visible = true) -@JsonTypeName("WebHook") @Fluent public final class WebhookActivity extends ControlActivity { /* * Type of activity. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WebHook"; /* * WebHook activity properties. */ - @JsonProperty(value = "typeProperties", required = true) private WebhookActivityTypeProperties innerTypeProperties = new WebhookActivityTypeProperties(); /* * Activity policy. 
*/ - @JsonProperty(value = "policy") private SecureInputOutputPolicy policy; /** @@ -337,4 +332,81 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(WebhookActivity.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("name", name()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeStringField("state", state() == null ? null : state().toString()); + jsonWriter.writeStringField("onInactiveMarkAs", + onInactiveMarkAs() == null ? null : onInactiveMarkAs().toString()); + jsonWriter.writeArrayField("dependsOn", dependsOn(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("userProperties", userProperties(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("policy", this.policy); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WebhookActivity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WebhookActivity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the WebhookActivity. 
+ */ + public static WebhookActivity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WebhookActivity deserializedWebhookActivity = new WebhookActivity(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("name".equals(fieldName)) { + deserializedWebhookActivity.withName(reader.getString()); + } else if ("description".equals(fieldName)) { + deserializedWebhookActivity.withDescription(reader.getString()); + } else if ("state".equals(fieldName)) { + deserializedWebhookActivity.withState(ActivityState.fromString(reader.getString())); + } else if ("onInactiveMarkAs".equals(fieldName)) { + deserializedWebhookActivity + .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.fromString(reader.getString())); + } else if ("dependsOn".equals(fieldName)) { + List dependsOn + = reader.readArray(reader1 -> ActivityDependency.fromJson(reader1)); + deserializedWebhookActivity.withDependsOn(dependsOn); + } else if ("userProperties".equals(fieldName)) { + List userProperties = reader.readArray(reader1 -> UserProperty.fromJson(reader1)); + deserializedWebhookActivity.withUserProperties(userProperties); + } else if ("typeProperties".equals(fieldName)) { + deserializedWebhookActivity.innerTypeProperties = WebhookActivityTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedWebhookActivity.type = reader.getString(); + } else if ("policy".equals(fieldName)) { + deserializedWebhookActivity.policy = SecureInputOutputPolicy.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedWebhookActivity.withAdditionalProperties(additionalProperties); + + return deserializedWebhookActivity; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivityMethod.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivityMethod.java index c097409fcc70..0285d796ac0a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivityMethod.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebhookActivityMethod.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -32,7 +31,6 @@ public WebhookActivityMethod() { * @param name a name to look for. * @return the corresponding WebhookActivityMethod. */ - @JsonCreator public static WebhookActivityMethod fromString(String name) { return fromString(name, WebhookActivityMethod.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java index 8c43ec72a489..4d18529b2755 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WranglingDataFlow.java @@ -5,31 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.PowerQueryTypeProperties; -import 
com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; import java.util.List; /** * Power Query data flow. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WranglingDataFlow.class, visible = true) -@JsonTypeName("WranglingDataFlow") @Fluent public final class WranglingDataFlow extends DataFlow { /* * Type of data flow. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "WranglingDataFlow"; /* * PowerQuery data flow type properties. */ - @JsonProperty(value = "typeProperties") private PowerQueryTypeProperties innerTypeProperties; /** @@ -165,4 +160,53 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of WranglingDataFlow from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of WranglingDataFlow if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the WranglingDataFlow. 
+ */ + public static WranglingDataFlow fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + WranglingDataFlow deserializedWranglingDataFlow = new WranglingDataFlow(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("description".equals(fieldName)) { + deserializedWranglingDataFlow.withDescription(reader.getString()); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedWranglingDataFlow.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedWranglingDataFlow.withFolder(DataFlowFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedWranglingDataFlow.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedWranglingDataFlow.innerTypeProperties = PowerQueryTypeProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedWranglingDataFlow; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java index ae6f38a8a7f0..3cf3d9cb19bd 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.XeroLinkedServiceTypeProperties; 
-import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Xero Service linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroLinkedService.class, visible = true) -@JsonTypeName("Xero") @Fluent public final class XeroLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Xero"; /* * Xero Service linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private XeroLinkedServiceTypeProperties innerTypeProperties = new XeroLinkedServiceTypeProperties(); /** @@ -311,4 +307,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(XeroLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XeroLinkedService from the JsonReader. 
+ * + * @param jsonReader The JsonReader being read. + * @return An instance of XeroLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the XeroLinkedService. + */ + public static XeroLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XeroLinkedService deserializedXeroLinkedService = new XeroLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedXeroLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedXeroLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedXeroLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedXeroLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedXeroLinkedService.innerTypeProperties + = XeroLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedXeroLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXeroLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedXeroLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java index 203ea60129af..aee4de980069 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Xero Service dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroObjectDataset.class, visible = true) -@JsonTypeName("XeroObject") @Fluent public final class XeroObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "XeroObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XeroObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XeroObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the XeroObjectDataset. 
+ */ + public static XeroObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XeroObjectDataset deserializedXeroObjectDataset = new XeroObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedXeroObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedXeroObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedXeroObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedXeroObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedXeroObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedXeroObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedXeroObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedXeroObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedXeroObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXeroObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedXeroObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java index f3ea47c84190..742123b3a418 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XeroSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Xero Service source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XeroSource.class, visible = true) -@JsonTypeName("XeroSource") @Fluent public final class XeroSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "XeroSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public XeroSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XeroSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XeroSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the XeroSource. 
+ */ + public static XeroSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XeroSource deserializedXeroSource = new XeroSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedXeroSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedXeroSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedXeroSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedXeroSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedXeroSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedXeroSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedXeroSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedXeroSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXeroSource.withAdditionalProperties(additionalProperties); + + return deserializedXeroSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java index 671c03828ed3..f9b46bd488b7 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.XmlDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Xml dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlDataset.class, visible = true) -@JsonTypeName("Xml") @Fluent public final class XmlDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Xml"; /* * Xml dataset properties. 
*/ - @JsonProperty(value = "typeProperties") private XmlDatasetTypeProperties innerTypeProperties; /** @@ -231,4 +227,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XmlDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XmlDataset if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the XmlDataset. 
+ */ + public static XmlDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XmlDataset deserializedXmlDataset = new XmlDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedXmlDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedXmlDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedXmlDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedXmlDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedXmlDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedXmlDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedXmlDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedXmlDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedXmlDataset.innerTypeProperties = XmlDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXmlDataset.withAdditionalProperties(additionalProperties); + + return deserializedXmlDataset; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java index 5a0afd4a170e..fbdf8f21a3e1 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlReadSettings.java @@ -5,50 +5,44 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * Xml read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlReadSettings.class, visible = true) -@JsonTypeName("XmlReadSettings") @Fluent public final class XmlReadSettings extends FormatReadSettings { /* * The read setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "XmlReadSettings"; /* * Compression settings. */ - @JsonProperty(value = "compressionProperties") private CompressionReadSettings compressionProperties; /* * Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. * Type: string (or Expression with resultType string). */ - @JsonProperty(value = "validationMode") private Object validationMode; /* * Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with * resultType boolean). 
*/ - @JsonProperty(value = "detectDataType") private Object detectDataType; /* * Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType * boolean). */ - @JsonProperty(value = "namespaces") private Object namespaces; /* @@ -56,7 +50,6 @@ public final class XmlReadSettings extends FormatReadSettings { * is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. * Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). */ - @JsonProperty(value = "namespacePrefixes") private Object namespacePrefixes; /** @@ -199,4 +192,66 @@ public void validate() { compressionProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("compressionProperties", this.compressionProperties); + jsonWriter.writeUntypedField("validationMode", this.validationMode); + jsonWriter.writeUntypedField("detectDataType", this.detectDataType); + jsonWriter.writeUntypedField("namespaces", this.namespaces); + jsonWriter.writeUntypedField("namespacePrefixes", this.namespacePrefixes); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XmlReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XmlReadSettings if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the XmlReadSettings. 
+ */ + public static XmlReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XmlReadSettings deserializedXmlReadSettings = new XmlReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedXmlReadSettings.type = reader.getString(); + } else if ("compressionProperties".equals(fieldName)) { + deserializedXmlReadSettings.compressionProperties = CompressionReadSettings.fromJson(reader); + } else if ("validationMode".equals(fieldName)) { + deserializedXmlReadSettings.validationMode = reader.readUntyped(); + } else if ("detectDataType".equals(fieldName)) { + deserializedXmlReadSettings.detectDataType = reader.readUntyped(); + } else if ("namespaces".equals(fieldName)) { + deserializedXmlReadSettings.namespaces = reader.readUntyped(); + } else if ("namespacePrefixes".equals(fieldName)) { + deserializedXmlReadSettings.namespacePrefixes = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXmlReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedXmlReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java index d095b52e42b1..5b4970eb6282 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/XmlSource.java @@ -5,42 +5,37 @@ package 
com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Xml source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = XmlSource.class, visible = true) -@JsonTypeName("XmlSource") @Fluent public final class XmlSource extends CopySource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "XmlSource"; /* * Xml store settings. */ - @JsonProperty(value = "storeSettings") private StoreReadSettings storeSettings; /* * Xml format settings. */ - @JsonProperty(value = "formatSettings") private XmlReadSettings formatSettings; /* * Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or * Expression with resultType array of objects). 
*/ - @JsonProperty(value = "additionalColumns") private Object additionalColumns; /** @@ -172,4 +167,72 @@ public void validate() { formatSettings().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("storeSettings", this.storeSettings); + jsonWriter.writeJsonField("formatSettings", this.formatSettings); + jsonWriter.writeUntypedField("additionalColumns", this.additionalColumns); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of XmlSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of XmlSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the XmlSource. 
+ */ + public static XmlSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + XmlSource deserializedXmlSource = new XmlSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedXmlSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedXmlSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedXmlSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedXmlSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedXmlSource.type = reader.getString(); + } else if ("storeSettings".equals(fieldName)) { + deserializedXmlSource.storeSettings = StoreReadSettings.fromJson(reader); + } else if ("formatSettings".equals(fieldName)) { + deserializedXmlSource.formatSettings = XmlReadSettings.fromJson(reader); + } else if ("additionalColumns".equals(fieldName)) { + deserializedXmlSource.additionalColumns = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedXmlSource.withAdditionalProperties(additionalProperties); + + return deserializedXmlSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskAuthenticationType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskAuthenticationType.java index 839bc37d4162..115baeb81203 100644 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskAuthenticationType.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskAuthenticationType.java @@ -5,7 +5,6 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.util.ExpandableStringEnum; -import com.fasterxml.jackson.annotation.JsonCreator; import java.util.Collection; /** @@ -37,7 +36,6 @@ public ZendeskAuthenticationType() { * @param name a name to look for. * @return the corresponding ZendeskAuthenticationType. */ - @JsonCreator public static ZendeskAuthenticationType fromString(String name) { return fromString(name, ZendeskAuthenticationType.class); } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java index 2749adfa6abb..1dba49e6e30a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZendeskLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ZendeskLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; 
import java.util.Map; /** * Linked service for Zendesk. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZendeskLinkedService.class, visible = true) -@JsonTypeName("Zendesk") @Fluent public final class ZendeskLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Zendesk"; /* * Zendesk linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ZendeskLinkedServiceTypeProperties innerTypeProperties = new ZendeskLinkedServiceTypeProperties(); /** @@ -255,4 +251,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ZendeskLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZendeskLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ZendeskLinkedService if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. 
+ * @throws IOException If an error occurs while reading the ZendeskLinkedService. + */ + public static ZendeskLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZendeskLinkedService deserializedZendeskLinkedService = new ZendeskLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedZendeskLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedZendeskLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedZendeskLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedZendeskLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedZendeskLinkedService.innerTypeProperties + = ZendeskLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedZendeskLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedZendeskLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedZendeskLinkedService; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java index 
942b3a047cfc..e3697b237a1f 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZipDeflateReadSettings.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * The ZipDeflate compression read settings. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZipDeflateReadSettings.class, visible = true) -@JsonTypeName("ZipDeflateReadSettings") @Fluent public final class ZipDeflateReadSettings extends CompressionReadSettings { /* * The Compression setting type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ZipDeflateReadSettings"; /* * Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). 
*/ - @JsonProperty(value = "preserveZipFileNameAsFolder") private Object preserveZipFileNameAsFolder; /** @@ -77,4 +74,54 @@ public ZipDeflateReadSettings withPreserveZipFileNameAsFolder(Object preserveZip public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("preserveZipFileNameAsFolder", this.preserveZipFileNameAsFolder); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZipDeflateReadSettings from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ZipDeflateReadSettings if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the ZipDeflateReadSettings. 
+ */ + public static ZipDeflateReadSettings fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZipDeflateReadSettings deserializedZipDeflateReadSettings = new ZipDeflateReadSettings(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("type".equals(fieldName)) { + deserializedZipDeflateReadSettings.type = reader.getString(); + } else if ("preserveZipFileNameAsFolder".equals(fieldName)) { + deserializedZipDeflateReadSettings.preserveZipFileNameAsFolder = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedZipDeflateReadSettings.withAdditionalProperties(additionalProperties); + + return deserializedZipDeflateReadSettings; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java index 3fb0de1b5c4c..48fe5ad6cfb8 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoLinkedService.java @@ -6,32 +6,28 @@ import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.ZohoLinkedServiceTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import 
com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Zoho server linked service. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoLinkedService.class, visible = true) -@JsonTypeName("Zoho") @Fluent public final class ZohoLinkedService extends LinkedService { /* * Type of linked service. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "Zoho"; /* * Zoho server linked service properties. */ - @JsonProperty(value = "typeProperties", required = true) private ZohoLinkedServiceTypeProperties innerTypeProperties = new ZohoLinkedServiceTypeProperties(); /** @@ -284,4 +280,71 @@ public void validate() { } private static final ClientLogger LOGGER = new ClientLogger(ZohoLinkedService.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectVia", connectVia()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + jsonWriter.writeStringField("type", this.type); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZohoLinkedService from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ZohoLinkedService if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ZohoLinkedService. + */ + public static ZohoLinkedService fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZohoLinkedService deserializedZohoLinkedService = new ZohoLinkedService(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectVia".equals(fieldName)) { + deserializedZohoLinkedService.withConnectVia(IntegrationRuntimeReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedZohoLinkedService.withDescription(reader.getString()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedZohoLinkedService.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedZohoLinkedService.withAnnotations(annotations); + } else if ("typeProperties".equals(fieldName)) { + deserializedZohoLinkedService.innerTypeProperties + = ZohoLinkedServiceTypeProperties.fromJson(reader); + } else if ("type".equals(fieldName)) { + deserializedZohoLinkedService.type = reader.getString(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedZohoLinkedService.withAdditionalProperties(additionalProperties); + + return deserializedZohoLinkedService; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java index a798a635ed2a..3b6d9dc699c0 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoObjectDataset.java @@ -5,32 +5,28 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import java.io.IOException; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Zoho server dataset. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoObjectDataset.class, visible = true) -@JsonTypeName("ZohoObject") @Fluent public final class ZohoObjectDataset extends Dataset { /* * Type of dataset. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ZohoObject"; /* * Properties specific to this dataset type. 
*/ - @JsonProperty(value = "typeProperties") private GenericDatasetTypeProperties innerTypeProperties; /** @@ -156,4 +152,79 @@ public void validate() { innerTypeProperties().validate(); } } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("linkedServiceName", linkedServiceName()); + jsonWriter.writeStringField("description", description()); + jsonWriter.writeUntypedField("structure", structure()); + jsonWriter.writeUntypedField("schema", schema()); + jsonWriter.writeMapField("parameters", parameters(), (writer, element) -> writer.writeJson(element)); + jsonWriter.writeArrayField("annotations", annotations(), (writer, element) -> writer.writeUntyped(element)); + jsonWriter.writeJsonField("folder", folder()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeJsonField("typeProperties", this.innerTypeProperties); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZohoObjectDataset from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ZohoObjectDataset if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ZohoObjectDataset. 
+ */ + public static ZohoObjectDataset fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZohoObjectDataset deserializedZohoObjectDataset = new ZohoObjectDataset(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("linkedServiceName".equals(fieldName)) { + deserializedZohoObjectDataset.withLinkedServiceName(LinkedServiceReference.fromJson(reader)); + } else if ("description".equals(fieldName)) { + deserializedZohoObjectDataset.withDescription(reader.getString()); + } else if ("structure".equals(fieldName)) { + deserializedZohoObjectDataset.withStructure(reader.readUntyped()); + } else if ("schema".equals(fieldName)) { + deserializedZohoObjectDataset.withSchema(reader.readUntyped()); + } else if ("parameters".equals(fieldName)) { + Map parameters + = reader.readMap(reader1 -> ParameterSpecification.fromJson(reader1)); + deserializedZohoObjectDataset.withParameters(parameters); + } else if ("annotations".equals(fieldName)) { + List annotations = reader.readArray(reader1 -> reader1.readUntyped()); + deserializedZohoObjectDataset.withAnnotations(annotations); + } else if ("folder".equals(fieldName)) { + deserializedZohoObjectDataset.withFolder(DatasetFolder.fromJson(reader)); + } else if ("type".equals(fieldName)) { + deserializedZohoObjectDataset.type = reader.getString(); + } else if ("typeProperties".equals(fieldName)) { + deserializedZohoObjectDataset.innerTypeProperties = GenericDatasetTypeProperties.fromJson(reader); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedZohoObjectDataset.withAdditionalProperties(additionalProperties); + + return deserializedZohoObjectDataset; + }); + } } diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java index 54f35fd440dc..068afb44cf45 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ZohoSource.java @@ -5,29 +5,26 @@ package com.azure.resourcemanager.datafactory.models; import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeId; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.annotation.JsonTypeName; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; /** * A copy activity Zoho server source. */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = ZohoSource.class, visible = true) -@JsonTypeName("ZohoSource") @Fluent public final class ZohoSource extends TabularSource { /* * Copy source type. */ - @JsonTypeId - @JsonProperty(value = "type", required = true) private String type = "ZohoSource"; /* * A query to retrieve data from source. Type: string (or Expression with resultType string). 
*/ - @JsonProperty(value = "query") private Object query; /** @@ -131,4 +128,72 @@ public ZohoSource withDisableMetricsCollection(Object disableMetricsCollection) public void validate() { super.validate(); } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount()); + jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait()); + jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections()); + jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection()); + jsonWriter.writeUntypedField("queryTimeout", queryTimeout()); + jsonWriter.writeUntypedField("additionalColumns", additionalColumns()); + jsonWriter.writeStringField("type", this.type); + jsonWriter.writeUntypedField("query", this.query); + if (additionalProperties() != null) { + for (Map.Entry additionalProperty : additionalProperties().entrySet()) { + jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue()); + } + } + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ZohoSource from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ZohoSource if the JsonReader was pointing to an instance of it, or null if it was pointing + * to JSON null. + * @throws IOException If an error occurs while reading the ZohoSource. 
+ */ + public static ZohoSource fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ZohoSource deserializedZohoSource = new ZohoSource(); + Map additionalProperties = null; + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("sourceRetryCount".equals(fieldName)) { + deserializedZohoSource.withSourceRetryCount(reader.readUntyped()); + } else if ("sourceRetryWait".equals(fieldName)) { + deserializedZohoSource.withSourceRetryWait(reader.readUntyped()); + } else if ("maxConcurrentConnections".equals(fieldName)) { + deserializedZohoSource.withMaxConcurrentConnections(reader.readUntyped()); + } else if ("disableMetricsCollection".equals(fieldName)) { + deserializedZohoSource.withDisableMetricsCollection(reader.readUntyped()); + } else if ("queryTimeout".equals(fieldName)) { + deserializedZohoSource.withQueryTimeout(reader.readUntyped()); + } else if ("additionalColumns".equals(fieldName)) { + deserializedZohoSource.withAdditionalColumns(reader.readUntyped()); + } else if ("type".equals(fieldName)) { + deserializedZohoSource.type = reader.getString(); + } else if ("query".equals(fieldName)) { + deserializedZohoSource.query = reader.readUntyped(); + } else { + if (additionalProperties == null) { + additionalProperties = new LinkedHashMap<>(); + } + + additionalProperties.put(fieldName, reader.readUntyped()); + } + } + deserializedZohoSource.withAdditionalProperties(additionalProperties); + + return deserializedZohoSource; + }); + } } diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java index 8f1fc8bbd693..6768f292c573 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/module-info.java @@ -8,6 +8,6 @@ exports 
com.azure.resourcemanager.datafactory.fluent; exports com.azure.resourcemanager.datafactory.fluent.models; exports com.azure.resourcemanager.datafactory.models; - opens com.azure.resourcemanager.datafactory.fluent.models to com.azure.core, com.fasterxml.jackson.databind; - opens com.azure.resourcemanager.datafactory.models to com.azure.core, com.fasterxml.jackson.databind; + opens com.azure.resourcemanager.datafactory.fluent.models to com.azure.core; + opens com.azure.resourcemanager.datafactory.models to com.azure.core; } \ No newline at end of file diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json index eca3dd44101f..8878e547a798 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-datafactory/reflect-config.json @@ -1,5671 +1 @@ -[ { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AccessPolicyResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ActivityRunsQueryResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AddDataFlowToDebugSessionResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.AmazonMwsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForOracleTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonS3CompatibleLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonS3DatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AmazonS3LinkedServiceTypeProperties", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AppFiguresLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AppendVariableActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AsanaLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AvroDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzPowerShellSetupTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureBatchLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureBlobDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.AzureBlobStorageLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerCommandActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeAnalyticsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDeltaLakeDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDetltaLakeLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureFileStorageLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureKeyVaultLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMLBatchExecutionActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMLExecutePipelineActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMLLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMLServiceLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMLUpdateResourceActivityTypeProperties", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMariaDBLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSearchIndexDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSearchLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDatabaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMILinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMITableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSqlTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureStorageLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureSynapseArtifactsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.AzureTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.BinaryDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.BlobEventsTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.BlobTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CassandraLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CassandraTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ChainingTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCapture", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCaptureResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CmdkeySetupTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsEntityDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ConcurLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CopyActivityTypeProperties", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CosmosDbLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiCollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CosmosDbSqlApiCollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CouchbaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CreateDataFlowDebugSessionResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CreateRunResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.CustomActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.CustomEventsTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugCommandResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugSessionInfoInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DataFlowResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DataLakeAnalyticsUsqlActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DatabricksNotebookActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkJarActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkPythonActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DatasetResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DataworldLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.Db2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.Db2TableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DeleteActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DelimitedTextDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DocumentDbCollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DrillDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DrillLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmEntityDatasetTypeProperties", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsEntityDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.DynamicsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.EloquaLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.EnvironmentVariableSetupTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExcelDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExecuteDataFlowActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExecutePipelineActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExecutePowerQueryActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.ExecuteSsisPackageActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExposureControlBatchResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ExposureControlResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FactoryInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FactoryProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FactoryUpdateProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FailActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FileServerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FileShareDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FilterActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.FlowletTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ForEachActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.FtpServerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GetMetadataActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GitHubAccessTokenResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GlobalParameterResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleAdWordsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { 
- "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2DatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleCloudStorageLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GoogleSheetsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GreenplumDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.GreenplumLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HBaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightHiveActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightMapReduceActivityTypeProperties", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightOnDemandLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightPigActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightSparkActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HDInsightStreamingActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HdfsLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HiveDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HiveLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HttpDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HttpLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.HubspotLinkedServiceTypeProperties", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IfConditionActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ImpalaDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ImpalaLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.InformixLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.InformixTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeAuthKeysInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeConnectionInfoInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeMonitoringDataInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeNodeIpAddressInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeStatusResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.JiraLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.JsonDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.LakeHouseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.LakeHouseTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.LicensedComponentSetupTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.LinkedServiceResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.LookupActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MagentoLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeStatusTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedIntegrationRuntimeTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedPrivateEndpointResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ManagedVirtualNetworkResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MapperTableProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MappingDataFlowTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MariaDBLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MarketoLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasCollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbCollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2CollectionDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.MySqlLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.MySqlTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.NetezzaLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.NetezzaTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ODataLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ODataResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OdbcLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OdbcTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.Office365DatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.Office365LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OperationInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.OperationProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OracleCloudStorageLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OracleLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OracleServiceCloudLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OracleTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.OrcDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ParquetDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PaypalLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PhoenixDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PhoenixLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { 
- "name" : "com.azure.resourcemanager.datafactory.fluent.models.Pipeline", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PipelineResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PipelineRunInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PipelineRunsQueryResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2TableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PowerQueryTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PrestoDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.PrestoLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PrivateEndpointConnectionResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.PrivateLinkResourcesWrapperInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.QuickBooksLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.QuickbaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.RelationalTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.RerunTumblingWindowTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ResponsysLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.RestResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.RestServiceLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : 
true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceMarketingCloudLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceObjectDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudObjectDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2ObjectDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2LinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2ObjectDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.SapBWLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapEccLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapEccResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapHanaLinkedServiceProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapHanaTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapOdpLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapOdpResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : 
true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapTableLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SapTableResourceDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ScheduleTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ScriptActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeStatusTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ServiceNowLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ServiceNowV2LinkedServiceTypeProperties", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ServicePrincipalCredentialTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SetVariableActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SftpServerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ShopifyLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SmartsheetLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.fluent.models.SnowflakeLinkedV2ServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SparkDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SparkLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SqlServerLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SqlServerStoredProcedureActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SqlServerTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SquareLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SsisLogLocationTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SsisObjectMetadataListResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SsisObjectMetadataStatusResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SsisPackageLocationTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SwitchActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SybaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SybaseTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SynapseNotebookActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.SynapseSparkJobActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TeamDeskLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TeradataLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TeradataTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TriggerQueryResponseInner", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TriggerResourceInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TriggerRunsQueryResponseInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TriggerSubscriptionOperationStatusInner", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TumblingWindowTriggerTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.TwilioLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.UntilActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ValidationActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.VerticaDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.VerticaLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WaitActivityTypeProperties", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WarehouseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WarehouseTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WebActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WebTableDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.WebhookActivityTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.XeroLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.XmlDatasetTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ZendeskLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.fluent.models.ZohoLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Activity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ActivityDependency", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ActivityPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ActivityRun", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ActivityState", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonMwsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonMwsObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonMwsSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForOraclePartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleSource", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlServerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlServerSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlServerTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRedshiftLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRedshiftSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonRedshiftTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3CompatibleLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3CompatibleLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.AmazonS3CompatibleReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3Dataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3Location", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AmazonS3ReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AppFiguresLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AppendVariableActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ArmIdWrapper", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AsanaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AvroDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AvroFormat", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.AvroSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AvroSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AvroWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzPowerShellSetup", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBatchLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.AzureBlobFSSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobFSWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobStorageLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobStorageLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobStorageReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureBlobStorageWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataExplorerCommandActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataExplorerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataExplorerSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataExplorerSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataExplorerTableDataset", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeAnalyticsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeExportCommand", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeImportCommand", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureDatabricksLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFileStorageLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFileStorageLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFileStorageReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFileStorageWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFunctionActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFunctionActivityMethod", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureFunctionLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureKeyVaultLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureKeyVaultSecretReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLBatchExecutionActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLExecutePipelineActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLServiceLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLUpdateResourceActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMLWebServiceFile", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMariaDBLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.AzureMariaDBSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMariaDBTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMySqlLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMySqlSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMySqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureMySqlTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzurePostgreSqlLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzurePostgreSqlSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzurePostgreSqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzurePostgreSqlTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureQueueSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.AzureSearchIndexDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSearchIndexSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSearchIndexWriteBehaviorType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSearchLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlDWAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlDWLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlDWTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlDatabaseAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlDatabaseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlMIAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlMILinkedService", - "allDeclaredConstructors" : 
true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlMITableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSqlTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureStorageAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureStorageLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureSynapseArtifactsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureTableSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureTableSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.AzureTableStorageLinkedService", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BigDataPoolReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BinaryDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BinaryReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BinarySink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BinarySource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BlobEventTypes", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BlobEventsTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BlobSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BlobSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.BlobTrigger", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CassandraLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CassandraSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CassandraSourceReadConsistencyLevels", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CassandraTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ChainingTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ChangeDataCaptureListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CmdkeySetup", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CmkIdentityDefinition", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsEntityDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ComponentSetup", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CompressionReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConcurLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConcurObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConcurSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConfigurationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConnectionStateProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ConnectionType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ContinuationSettingsReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ControlActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopyActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopyActivityLogSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopyComputeScaleProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopySink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopySource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CopyTranslator", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbConnectionMode", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiCollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiCollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CouchbaseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CouchbaseSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CouchbaseTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest", - "allDeclaredConstructors" : 
true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Credential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CredentialListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CredentialReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CredentialReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomActivityReferenceObject", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomDataSourceLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomEventsTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.CustomSetupBase", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DWCopyCommandDefaultValue", - "allDeclaredConstructors" : 
true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DWCopyCommandSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlow", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowComputeType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugPackage", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowDebugResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowFolder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowListResponse", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataLakeAnalyticsUsqlActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataMapperMapping", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatabricksNotebookActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatabricksSparkJarActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.DatabricksSparkPythonActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Dataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetCompression", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetDebugResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetFolder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetSchemaDataElement", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DatasetStorageFormat", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DataworldLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.DayOfWeek", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DaysOfWeek", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Db2AuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Db2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Db2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Db2TableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DeleteActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DelimitedTextDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DelimitedTextReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DelimitedTextSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.DelimitedTextSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DelimitedTextWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DependencyCondition", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DependencyReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DistcpSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DocumentDbCollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DocumentDbCollectionSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DocumentDbCollectionSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DrillLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DrillSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DrillTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" 
: "com.azure.resourcemanager.datafactory.models.DynamicsAXLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsAXResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsAXSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsCrmEntityDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsCrmLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsCrmSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsCrmSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsEntityDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.DynamicsSinkWriteBehavior", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.DynamicsSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EloquaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EloquaObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EloquaSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EncryptionConfiguration", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EntityReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EnvironmentVariableSetup", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.EventSubscriptionStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExcelDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExcelSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecuteDataFlowActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecutePipelineActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecutePipelineActivityPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecuteSsisPackageActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecuteWranglingDataflowActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExecutionActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExportSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExposureControlRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Expression", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExpressionV2", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ExpressionV2Type", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryGitHubConfiguration", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryIdentity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryIdentityType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryRepoConfiguration", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryRepoUpdate", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryUpdateParameters", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FactoryVstsConfiguration", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FailActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileServerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileServerLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileServerReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileServerWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileShareDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileSystemSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FileSystemSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FilterActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Flowlet", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ForEachActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FormatReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FormatWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.FrequencyType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FtpAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FtpReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FtpServerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.FtpServerLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GetDataFactoryOperationStatusResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GetMetadataActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GitHubAccessTokenRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GitHubClientSecret", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GlobalParameterListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GlobalParameterType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleAdWordsAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleAdWordsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleAdWordsObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleAdWordsSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQuerySource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2AuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2ObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleCloudStorageLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleCloudStorageLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleCloudStorageReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GoogleSheetsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GreenplumLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GreenplumSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.GreenplumTableDataset", - "allDeclaredConstructors" : true, - 
"allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HBaseAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HBaseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HBaseObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HBaseSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightHiveActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightMapReduceActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightOnDemandLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightPigActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightSparkActivity", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HDInsightStreamingActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HdfsLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HdfsLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HdfsReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HdfsSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveServerType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HiveThriftTransportProtocol", - "allDeclaredConstructors" : 
true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpServerLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HttpSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HubspotLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HubspotObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.HubspotSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IfConditionActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ImpalaAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" 
: true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ImpalaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ImpalaObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ImpalaSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ImportSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.InformixLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.InformixSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.InformixSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.InformixTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntime", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAuthKeyName", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAutoUpdate", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeComputeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataProxyProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeEdition", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeEntityReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode", - "allDeclaredConstructors" : true, - "allDeclaredFields" 
: true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeLicenseType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeMonitoringData", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeRegenerateKeyParameters", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeSsisCatalogInfo", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier", - "allDeclaredConstructors" 
: true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeSsisProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeState", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeUpdateResult", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.IntegrationRuntimeVNetProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JiraLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JiraObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JiraSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonFormat", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.JsonWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseTableSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseTableSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LakeHouseWriteSettings", - "allDeclaredConstructors" 
: true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntime", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeKeyAuthorization", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRbacAuthorization", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedServiceListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LinkedServiceReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LogLocationSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.LogSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LogStorageSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.LookupActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MagentoLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MagentoObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MagentoSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntime", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeError", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeNode", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeNodeStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeOperationResult", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedIntegrationRuntimeStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperAttributeMapping", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperAttributeMappings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.MapperAttributeReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperConnection", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperConnectionReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperTable", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperTableSchema", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MappingDataFlow", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.MappingType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MariaDBLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MariaDBSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MariaDBTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MarketoLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MarketoObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MarketoSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MetadataItem", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MicrosoftAccessLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MicrosoftAccessSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MicrosoftAccessSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.MicrosoftAccessTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbAtlasCollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbAtlasLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbAtlasSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbAtlasSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbCollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbCursorMethodsProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbV2CollectionDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbV2Sink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MongoDbV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MultiplePipelineTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MySqlLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MySqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.MySqlTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NetezzaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NetezzaPartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NetezzaSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NetezzaTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NotebookParameter", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NotebookParameterType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.NotebookReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ODataAadServicePrincipalCredentialType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ODataAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ODataLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ODataResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ODataSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OdbcLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OdbcSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OdbcSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.OdbcTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Office365Dataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Office365LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Office365Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationDisplay", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationLogSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationMetricAvailability", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationMetricDimension", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationMetricSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OperationServiceSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleCloudStorageLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleCloudStorageLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleCloudStorageReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OraclePartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleServiceCloudLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleServiceCloudObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleServiceCloudSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OracleTableDataset", - "allDeclaredConstructors" : 
true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OrcDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OrcFormat", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OrcSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OrcSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.OrcWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PackageStore", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParameterSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParameterType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParquetDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParquetFormat", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParquetReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.ParquetSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParquetSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ParquetWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PaypalLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PaypalObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PaypalSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PhoenixAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PhoenixLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PhoenixObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PhoenixSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.PipelineExternalComputeScaleProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelineFolder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelineListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelinePolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelineReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PipelineRunInvokedBy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PolybaseSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PolybaseSettingsRejectType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PostgreSqlV2TableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PowerQuerySink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PowerQuerySinkMapping", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PowerQuerySource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrestoAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrestoLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrestoObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrestoSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateEndpoint", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequestResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateLinkResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PrivateLinkResourceProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PublicNetworkAccess", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.PurviewConfiguration", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.QueryDataFlowDebugSessionsResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.QuickBooksLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.QuickBooksObjectDataset", - 
"allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.QuickBooksSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.QuickbaseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RecurrenceFrequency", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RecurrenceSchedule", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RedirectIncompatibleRowSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RedshiftUnloadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RelationalSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RelationalTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RemotePrivateEndpointConnection", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.RerunTumblingWindowTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ResponsysLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ResponsysObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ResponsysSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RestResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RestServiceAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RestServiceLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RestSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RestSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RetryPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunFilterParameters", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.RunQueryFilter", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunQueryOrder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunQueryOrderBy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.RunQueryOrderByField", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceMarketingCloudLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceMarketingCloudObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceMarketingCloudSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2ObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2Sink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceSinkWriteBehavior", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.SalesforceSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceV2ObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceV2Sink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceV2SinkWriteBehavior", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SalesforceV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapBWLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapBwCubeDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapBwSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapCloudForCustomerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapCloudForCustomerResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSinkWriteBehavior", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapEccLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapEccResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapEccSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapHanaAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapHanaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapHanaPartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapHanaSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapHanaTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOdpLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOdpResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOdpSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOpenHubLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOpenHubSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapOpenHubTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapTableLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapTablePartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapTableResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SapTableSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScheduleTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptAction", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityLogDestination", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityParameter", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SecretBase", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SecureInputOutputPolicy", - "allDeclaredConstructors" 
: true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SecureString", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SelfDependencyTumblingWindowTriggerReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowV2AuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.ServiceNowV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowV2ObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServiceNowV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ServicePrincipalCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SetVariableActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SftpAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SftpLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SftpReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SftpServerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SftpWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SharePointOnlineListLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SharePointOnlineListResourceDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SharePointOnlineListSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ShopifyLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ShopifyObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ShopifySource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SkipErrorFile", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SmartsheetLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeExportCopyCommand", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeImportCopyCommand", - "allDeclaredConstructors" : true, - "allDeclaredFields" : 
true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeV2Dataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeV2LinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeV2Sink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SnowflakeV2Source", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkJobReferenceType", - "allDeclaredConstructors" : true, 
- "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkServerType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SparkThriftTransportProtocol", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedAkvAuthType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlAlwaysEncryptedProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlDWSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlDWSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlDWUpsertSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlMISink", - "allDeclaredConstructors" : true, - "allDeclaredFields" 
: true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlMISource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlPartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerStoredProcedureActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlServerTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" 
: true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SqlUpsertSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SquareLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SquareObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SquareSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisAccessCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisChildPackage", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisEnvironment", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisEnvironmentReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisExecutionCredential", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisExecutionParameter", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisFolder", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - 
"allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisLogLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisLogLocationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisObjectMetadata", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisObjectMetadataType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisPackage", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisPackageLocation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisPackageLocationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisParameter", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisProject", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisPropertyOverride", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SsisVariable", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { 
- "name" : "com.azure.resourcemanager.datafactory.models.StagingSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.StoreReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.StoreWriteSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SubResourceDebugResource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SwitchActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SwitchCase", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SybaseAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SybaseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SybaseSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SybaseTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SynapseNotebookActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.SynapseNotebookReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SynapseSparkJobDefinitionActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TabularSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TabularTranslator", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TarGZipReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TarReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeamDeskAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeamDeskLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeradataAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeradataLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeradataPartitionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeradataSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TeradataTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TextFormat", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Transformation", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.Trigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerDependencyReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerFilterParameters", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerListResponse", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerPipelineReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" 
: "com.azure.resourcemanager.datafactory.models.TriggerReferenceType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerRun", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerRunStatus", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TriggerRuntimeState", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TumblingWindowFrequency", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TumblingWindowTrigger", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TumblingWindowTriggerDependencyReference", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TwilioLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.TypeConversionSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.UntilActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : 
true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeRequest", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.UserAccessPolicy", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.UserProperty", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ValidationActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.VariableSpecification", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.VariableType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.VerticaLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.VerticaSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.VerticaTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WaitActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WarehouseLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.WarehouseSink", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WarehouseSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WarehouseTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebActivityAuthentication", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebActivityMethod", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebAnonymousAuthentication", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebBasicAuthentication", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebClientCertificateAuthentication", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - 
"name" : "com.azure.resourcemanager.datafactory.models.WebLinkedServiceTypeProperties", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebTableDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebhookActivity", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WebhookActivityMethod", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.WranglingDataFlow", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.XeroLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.XeroObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.XeroSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.XmlDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.XmlReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : 
"com.azure.resourcemanager.datafactory.models.XmlSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZendeskAuthenticationType", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZendeskLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZipDeflateReadSettings", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZohoLinkedService", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZohoObjectDataset", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -}, { - "name" : "com.azure.resourcemanager.datafactory.models.ZohoSource", - "allDeclaredConstructors" : true, - "allDeclaredFields" : true, - "allDeclaredMethods" : true -} ] \ No newline at end of file +[ ] \ No newline at end of file diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java deleted file mode 100644 index 0f9e580423eb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityDependencyTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ActivityDependencyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ActivityDependency model = BinaryData.fromString( - "{\"activity\":\"wcoezbrhub\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"okkqfqjbvleo\":\"dataygo\"}}") - .toObject(ActivityDependency.class); - Assertions.assertEquals("wcoezbrhub", model.activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependencyConditions().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ActivityDependency model = new ActivityDependency().withActivity("wcoezbrhub") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(ActivityDependency.class); - Assertions.assertEquals("wcoezbrhub", model.activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependencyConditions().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java deleted file mode 100644 index a08d10d44813..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityPolicyTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ActivityPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ActivityPolicy model = BinaryData.fromString( - "{\"timeout\":\"datazypmmyr\",\"retry\":\"datasdouzohihqlwy\",\"retryIntervalInSeconds\":1734763224,\"secureInput\":false,\"secureOutput\":true,\"\":{\"qztakov\":\"datamczcxouaneuep\",\"sdsnuqqg\":\"datalvwpvdmtfcstucmi\",\"ka\":\"dataqitiut\"}}") - .toObject(ActivityPolicy.class); - Assertions.assertEquals(1734763224, model.retryIntervalInSeconds()); - Assertions.assertEquals(false, model.secureInput()); - Assertions.assertEquals(true, model.secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ActivityPolicy model = new ActivityPolicy().withTimeout("datazypmmyr") - .withRetry("datasdouzohihqlwy") - 
.withRetryIntervalInSeconds(1734763224) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(ActivityPolicy.class); - Assertions.assertEquals(1734763224, model.retryIntervalInSeconds()); - Assertions.assertEquals(false, model.secureInput()); - Assertions.assertEquals(true, model.secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java deleted file mode 100644 index b6b17cbc4b11..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityRunTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityRun; -import java.util.HashMap; -import java.util.Map; - -public final class ActivityRunTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ActivityRun model = BinaryData.fromString( - "{\"pipelineName\":\"tj\",\"pipelineRunId\":\"ysdzhez\",\"activityName\":\"vaiqyuvvf\",\"activityType\":\"kphhq\",\"activityRunId\":\"kvylauyavl\",\"linkedServiceName\":\"mncsttijfybvp\",\"status\":\"krsgsgb\",\"activityRunStart\":\"2021-08-14T00:34:57Z\",\"activityRunEnd\":\"2021-08-27T04:01:57Z\",\"durationInMs\":1030277029,\"input\":\"datadgkynscliqhzvhxn\",\"output\":\"datamtk\",\"error\":\"dataotppnv\",\"\":{\"dhlfkqojpykvgt\":\"dataxhihfrbbcevqagtl\"}}") - .toObject(ActivityRun.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ActivityRun model = new ActivityRun().withAdditionalProperties( - mapOf("durationInMs", 1030277029, "linkedServiceName", "mncsttijfybvp", "activityRunStart", - "2021-08-14T00:34:57Z", "activityRunEnd", "2021-08-27T04:01:57Z", "activityName", "vaiqyuvvf", "error", - "dataotppnv", "pipelineName", "tj", "output", "datamtk", "activityRunId", "kvylauyavl", "input", - "datadgkynscliqhzvhxn", "pipelineRunId", "ysdzhez", "activityType", "kphhq", "status", "krsgsgb")); - model = BinaryData.fromObject(model).toObject(ActivityRun.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java deleted file mode 100644 index 9cd09ed3f074..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ActivityTests.java +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Activity model = BinaryData.fromString( - 
"{\"type\":\"Activity\",\"name\":\"volvtn\",\"description\":\"qfzgemjdftul\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"amtmcz\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"qioknssxmojm\":\"datawcw\"}},{\"activity\":\"vpkjpr\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"ydbsd\":\"datazqljyxgtczh\"}}],\"userProperties\":[{\"name\":\"kx\",\"value\":\"dataaehvbbxuri\"}],\"\":{\"ckpyklyhplu\":\"datafnhtbaxkgxyw\",\"gzibthostgktstv\":\"datadpvruud\",\"odqkdlwwqfb\":\"dataxeclzedqbcvhzlhp\",\"lmbtxhwgfwsrt\":\"datamlkxtrqjfs\"}}") - .toObject(Activity.class); - Assertions.assertEquals("volvtn", model.name()); - Assertions.assertEquals("qfzgemjdftul", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("amtmcz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kx", model.userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Activity model = new Activity().withName("volvtn") - .withDescription("qfzgemjdftul") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("amtmcz") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vpkjpr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("kx").withValue("dataaehvbbxuri"))) - .withAdditionalProperties(mapOf("type", "Activity")); - model = BinaryData.fromObject(model).toObject(Activity.class); - 
Assertions.assertEquals("volvtn", model.name()); - Assertions.assertEquals("qfzgemjdftul", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("amtmcz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("kx", model.userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java deleted file mode 100644 index 61e54aa01de8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AddDataFlowToDebugSessionResponseInnerTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AddDataFlowToDebugSessionResponseInner; -import org.junit.jupiter.api.Assertions; - -public final class AddDataFlowToDebugSessionResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AddDataFlowToDebugSessionResponseInner model = BinaryData.fromString("{\"jobVersion\":\"fbcgwgcloxoebqin\"}") - .toObject(AddDataFlowToDebugSessionResponseInner.class); - Assertions.assertEquals("fbcgwgcloxoebqin", model.jobVersion()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AddDataFlowToDebugSessionResponseInner model - = new AddDataFlowToDebugSessionResponseInner().withJobVersion("fbcgwgcloxoebqin"); - model = BinaryData.fromObject(model).toObject(AddDataFlowToDebugSessionResponseInner.class); - Assertions.assertEquals("fbcgwgcloxoebqin", model.jobVersion()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java deleted file mode 100644 index 14edb8f8e217..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonMwsObjectDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AmazonMwsObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonMwsObjectDataset model = BinaryData.fromString( - "{\"type\":\"AmazonMWSObject\",\"typeProperties\":{\"tableName\":\"dataclz\"},\"description\":\"rdpuyytbpkrp\",\"structure\":\"dataqetp\",\"schema\":\"datanefnoafp\",\"linkedServiceName\":{\"referenceName\":\"nrxiyrxow\",\"parameters\":{\"frfa\":\"datafcmuajwblxphto\",\"ihtibufgz\":\"datatnnsvrfajy\",\"ctblfehb\":\"datazhl\"}},\"parameters\":{\"jxdumhycxonebld\":{\"type\":\"Int\",\"defaultValue\":\"datahrcmeq\"}},\"annotations\":[\"datamdfjwcngkwxjsj\",\"datauvohufzdtsrp\"],\"folder\":{\"name\":\"gzacrzhns\"},\"\":{\"uanbfulvc\":\"datagmgbhuqcz\"}}") - .toObject(AmazonMwsObjectDataset.class); - Assertions.assertEquals("rdpuyytbpkrp", model.description()); - Assertions.assertEquals("nrxiyrxow", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("jxdumhycxonebld").type()); - Assertions.assertEquals("gzacrzhns", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonMwsObjectDataset model = new AmazonMwsObjectDataset().withDescription("rdpuyytbpkrp") - .withStructure("dataqetp") - .withSchema("datanefnoafp") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("nrxiyrxow") - .withParameters( - mapOf("frfa", "datafcmuajwblxphto", "ihtibufgz", "datatnnsvrfajy", "ctblfehb", "datazhl"))) - .withParameters(mapOf("jxdumhycxonebld", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datahrcmeq"))) - .withAnnotations(Arrays.asList("datamdfjwcngkwxjsj", "datauvohufzdtsrp")) - .withFolder(new DatasetFolder().withName("gzacrzhns")) - .withTableName("dataclz"); - model = BinaryData.fromObject(model).toObject(AmazonMwsObjectDataset.class); - Assertions.assertEquals("rdpuyytbpkrp", model.description()); - Assertions.assertEquals("nrxiyrxow", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("jxdumhycxonebld").type()); - Assertions.assertEquals("gzacrzhns", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java deleted file mode 100644 index 97cced03f3f5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonMwsSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonMwsSource; - -public final class AmazonMwsSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonMwsSource model = BinaryData.fromString( - "{\"type\":\"AmazonMWSSource\",\"query\":\"dataawbftzn\",\"queryTimeout\":\"datarfhj\",\"additionalColumns\":\"dataiutbrnr\",\"sourceRetryCount\":\"dataljucodrbkdieismd\",\"sourceRetryWait\":\"datafim\",\"maxConcurrentConnections\":\"datacij\",\"disableMetricsCollection\":\"datamnkvpafoe\",\"\":{\"llcckgfoxvrbf\":\"dataskelwzmjiigq\"}}") - .toObject(AmazonMwsSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonMwsSource model = new AmazonMwsSource().withSourceRetryCount("dataljucodrbkdieismd") - .withSourceRetryWait("datafim") - .withMaxConcurrentConnections("datacij") - .withDisableMetricsCollection("datamnkvpafoe") - .withQueryTimeout("datarfhj") - .withAdditionalColumns("dataiutbrnr") - .withQuery("dataawbftzn"); - model = BinaryData.fromObject(model).toObject(AmazonMwsSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java deleted file mode 100644 index a979b3712ab1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOraclePartitionSettingsTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForOraclePartitionSettings; - -public final class AmazonRdsForOraclePartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForOraclePartitionSettings model = BinaryData.fromString( - "{\"partitionNames\":\"databsspexejhwpnjc\",\"partitionColumnName\":\"datacj\",\"partitionUpperBound\":\"dataovuvmdzdqtir\",\"partitionLowerBound\":\"dataajsrdecbowkhmaff\"}") - .toObject(AmazonRdsForOraclePartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForOraclePartitionSettings model - = new AmazonRdsForOraclePartitionSettings().withPartitionNames("databsspexejhwpnjc") - .withPartitionColumnName("datacj") - .withPartitionUpperBound("dataovuvmdzdqtir") - .withPartitionLowerBound("dataajsrdecbowkhmaff"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForOraclePartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java deleted file mode 100644 index 567a2aedd78a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleSourceTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForOraclePartitionSettings; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleSource; - -public final class AmazonRdsForOracleSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForOracleSource model = BinaryData.fromString( - "{\"type\":\"AmazonRdsForOracleSource\",\"oracleReaderQuery\":\"datadswfnomciwhu\",\"queryTimeout\":\"datag\",\"partitionOption\":\"datafnd\",\"partitionSettings\":{\"partitionNames\":\"datau\",\"partitionColumnName\":\"datafvr\",\"partitionUpperBound\":\"datacgdptfxoffck\",\"partitionLowerBound\":\"dataxomnguqwx\"},\"additionalColumns\":\"datantt\",\"sourceRetryCount\":\"dataksvxim\",\"sourceRetryWait\":\"datasiy\",\"maxConcurrentConnections\":\"datauyzrnngnmfh\",\"disableMetricsCollection\":\"datafoummdomvd\",\"\":{\"sidfhsfnoczefg\":\"datayqalwlirapq\",\"bxzeiudogqfc\":\"dataqxejjnti\",\"omufazacwzbe\":\"databro\"}}") - .toObject(AmazonRdsForOracleSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForOracleSource model = new AmazonRdsForOracleSource().withSourceRetryCount("dataksvxim") - .withSourceRetryWait("datasiy") - .withMaxConcurrentConnections("datauyzrnngnmfh") - .withDisableMetricsCollection("datafoummdomvd") - .withOracleReaderQuery("datadswfnomciwhu") - .withQueryTimeout("datag") - .withPartitionOption("datafnd") - .withPartitionSettings(new AmazonRdsForOraclePartitionSettings().withPartitionNames("datau") - .withPartitionColumnName("datafvr") - .withPartitionUpperBound("datacgdptfxoffck") - .withPartitionLowerBound("dataxomnguqwx")) - .withAdditionalColumns("datantt"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleSource.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java deleted file mode 100644 index 453916f77c3b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AmazonRdsForOracleTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForOracleTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AmazonRdsForOracleTable\",\"typeProperties\":{\"schema\":\"dataotygnbknhjg\",\"table\":\"dataxaxw\"},\"description\":\"faspsdzkucszghd\",\"structure\":\"dataqip\",\"schema\":\"dataxclfrs\",\"linkedServiceName\":{\"referenceName\":\"zrn\",\"parameters\":{\"fddtbfmekjcng\":\"datawvpu\",\"dv\":\"datan\",\"xf\":\"dataaoy\",\"txoqxtdn\":\"datagyxzmxynofxl\"}},\"parameters\":{\"pskcjhmm\":{\"type\":\"SecureString\",\"defaultValue\":\"datairk\"}},\"annotations\":[\"datanivdqtkyk\",\"dataaxnlsfgnysca\",\"datacptbzetxygxxicee\"],\"folder\":{\"name\":\"wyuveoxmp\"},\"\":{\"vhaztkxbivz\":\"dataahdr\",\"ykqfdqwdrtx\":\"datagxmbrygmwibiosiq\",\"ounzsiywh\":\"datafdaglmrcokzzert\",\"popikzeb\":\"databym\"}}") - .toObject(AmazonRdsForOracleTableDataset.class); - Assertions.assertEquals("faspsdzkucszghd", model.description()); - Assertions.assertEquals("zrn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("pskcjhmm").type()); - Assertions.assertEquals("wyuveoxmp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForOracleTableDataset model - = new AmazonRdsForOracleTableDataset().withDescription("faspsdzkucszghd") - .withStructure("dataqip") - .withSchema("dataxclfrs") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zrn") - .withParameters(mapOf("fddtbfmekjcng", "datawvpu", "dv", "datan", "xf", "dataaoy", "txoqxtdn", - "datagyxzmxynofxl"))) - .withParameters(mapOf("pskcjhmm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datairk"))) - .withAnnotations(Arrays.asList("datanivdqtkyk", "dataaxnlsfgnysca", "datacptbzetxygxxicee")) - .withFolder(new DatasetFolder().withName("wyuveoxmp")) - .withSchemaTypePropertiesSchema("dataotygnbknhjg") - .withTable("dataxaxw"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleTableDataset.class); - 
Assertions.assertEquals("faspsdzkucszghd", model.description()); - Assertions.assertEquals("zrn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("pskcjhmm").type()); - Assertions.assertEquals("wyuveoxmp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java deleted file mode 100644 index ffd588bfec28..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForOracleTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForOracleTableDatasetTypeProperties; - -public final class AmazonRdsForOracleTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForOracleTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datanfywtkq\",\"table\":\"datasdlkhcz\"}") - .toObject(AmazonRdsForOracleTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForOracleTableDatasetTypeProperties model - = new AmazonRdsForOracleTableDatasetTypeProperties().withSchema("datanfywtkq").withTable("datasdlkhcz"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForOracleTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java deleted file mode 100644 index 0ffd6da3e15a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerSourceTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlServerSource; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; - -public final class AmazonRdsForSqlServerSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForSqlServerSource model = BinaryData.fromString( - "{\"type\":\"AmazonRdsForSqlServerSource\",\"sqlReaderQuery\":\"datakubljnizwztlcrx\",\"sqlReaderStoredProcedureName\":\"dataiifgxnfarmf\",\"storedProcedureParameters\":\"dataqrd\",\"isolationLevel\":\"datavtrulzlrm\",\"produceAdditionalTypes\":\"datacsmpmhlc\",\"partitionOption\":\"dataa\",\"partitionSettings\":{\"partitionColumnName\":\"datacdfelvap\",\"partitionUpperBound\":\"databicjzntiblxeygo\",\"partitionLowerBound\":\"datahroi\"},\"queryTimeout\":\"datatg\",\"additionalColumns\":\"dataymoanpkcmdixiu\",\"sourceRetryCount\":\"databc\",\"sourceRetryWait\":\"datagspzoafprzlvho\",\"maxConcurrentConnections\":\"datakc\",\"disableMetricsCollection\":\"dataddzposmnm\",\"\":{\"muueoxmkru\":\"datae\"}}") - .toObject(AmazonRdsForSqlServerSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForSqlServerSource model = new AmazonRdsForSqlServerSource().withSourceRetryCount("databc") - .withSourceRetryWait("datagspzoafprzlvho") - .withMaxConcurrentConnections("datakc") - .withDisableMetricsCollection("dataddzposmnm") - .withQueryTimeout("datatg") - .withAdditionalColumns("dataymoanpkcmdixiu") - .withSqlReaderQuery("datakubljnizwztlcrx") - .withSqlReaderStoredProcedureName("dataiifgxnfarmf") - .withStoredProcedureParameters("dataqrd") - .withIsolationLevel("datavtrulzlrm") - .withProduceAdditionalTypes("datacsmpmhlc") - .withPartitionOption("dataa") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datacdfelvap") - 
.withPartitionUpperBound("databicjzntiblxeygo") - .withPartitionLowerBound("datahroi")); - model = BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java deleted file mode 100644 index 1cea912783e2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRdsForSqlServerTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AmazonRdsForSqlServerTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForSqlServerTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AmazonRdsForSqlServerTable\",\"typeProperties\":{\"schema\":\"datakbzbloasyb\",\"table\":\"dataqvovdpmhttu\"},\"description\":\"qjsrvjnqtaqgxq\",\"structure\":\"datakceincnrec\",\"schema\":\"dataiywevsfg\",\"linkedServiceName\":{\"referenceName\":\"rmnszdosmjsqsvz\",\"parameters\":{\"wghndaevhgs\":\"datat\",\"jjuzk\":\"datalwvgseufigv\"}},\"parameters\":{\"vljlbzdlby\":{\"type\":\"Array\",\"defaultValue\":\"datawuhyzekypy\"},\"ov\":{\"type\":\"String\",\"defaultValue\":\"dataxhpzy\"}},\"annotations\":[\"databhanz\"],\"folder\":{\"name\":\"fhsh\"},\"\":{\"zpbyfyvynpmggq\":\"dataahn\",\"izorbloejzs\":\"dataagenvqbugihcdvf\"}}") - .toObject(AmazonRdsForSqlServerTableDataset.class); - Assertions.assertEquals("qjsrvjnqtaqgxq", model.description()); - Assertions.assertEquals("rmnszdosmjsqsvz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vljlbzdlby").type()); - Assertions.assertEquals("fhsh", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForSqlServerTableDataset model - = new AmazonRdsForSqlServerTableDataset().withDescription("qjsrvjnqtaqgxq") - .withStructure("datakceincnrec") - .withSchema("dataiywevsfg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rmnszdosmjsqsvz") - .withParameters(mapOf("wghndaevhgs", "datat", "jjuzk", "datalwvgseufigv"))) - .withParameters(mapOf("vljlbzdlby", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datawuhyzekypy"), "ov", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxhpzy"))) - .withAnnotations(Arrays.asList("databhanz")) - .withFolder(new DatasetFolder().withName("fhsh")) - .withSchemaTypePropertiesSchema("datakbzbloasyb") - .withTable("dataqvovdpmhttu"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerTableDataset.class); - Assertions.assertEquals("qjsrvjnqtaqgxq", 
model.description()); - Assertions.assertEquals("rmnszdosmjsqsvz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("vljlbzdlby").type()); - Assertions.assertEquals("fhsh", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java deleted file mode 100644 index 8ab323e721cc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRdsForSqlServerTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AmazonRdsForSqlServerTableDatasetTypeProperties; - -public final class AmazonRdsForSqlServerTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRdsForSqlServerTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datazgkqwvde\",\"table\":\"datajnaphifk\"}") - .toObject(AmazonRdsForSqlServerTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRdsForSqlServerTableDatasetTypeProperties model - = new AmazonRdsForSqlServerTableDatasetTypeProperties().withSchema("datazgkqwvde") - .withTable("datajnaphifk"); - model = BinaryData.fromObject(model).toObject(AmazonRdsForSqlServerTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java deleted file mode 100644 index 9db3f3c4e1db..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftSourceTests.java +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRedshiftSource; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.RedshiftUnloadSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AmazonRedshiftSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRedshiftSource model = BinaryData.fromString( - "{\"type\":\"AmazonRedshiftSource\",\"query\":\"datauuqcmu\",\"redshiftUnloadSettings\":{\"s3LinkedServiceName\":{\"referenceName\":\"f\",\"parameters\":{\"erwywl\":\"datagnyvypuioxhow\"}},\"bucketName\":\"datahiuwv\"},\"queryTimeout\":\"datagejytqnzrcbh\",\"additionalColumns\":\"datahctjvlwfnzgz\",\"sourceRetryCount\":\"datafyvytydrdcwbaiaq\",\"sourceRetryWait\":\"datauhsmuclx\",\"maxConcurrentConnections\":\"dataedusu\",\"disableMetricsCollection\":\"datacvykagsxhz\",\"\":{\"olqx\":\"datavvlib\",\"fgwuj\":\"dataoedjzrvfrfsyq\",\"djnspyfqoygutq\":\"dataowthvuepszzn\"}}") - .toObject(AmazonRedshiftSource.class); - Assertions.assertEquals("f", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRedshiftSource model - = new AmazonRedshiftSource().withSourceRetryCount("datafyvytydrdcwbaiaq") - .withSourceRetryWait("datauhsmuclx") - .withMaxConcurrentConnections("dataedusu") - .withDisableMetricsCollection("datacvykagsxhz") - .withQueryTimeout("datagejytqnzrcbh") - .withAdditionalColumns("datahctjvlwfnzgz") - .withQuery("datauuqcmu") - .withRedshiftUnloadSettings(new RedshiftUnloadSettings() - .withS3LinkedServiceName(new LinkedServiceReference().withReferenceName("f") - .withParameters(mapOf("erwywl", "datagnyvypuioxhow"))) - .withBucketName("datahiuwv")); - 
model = BinaryData.fromObject(model).toObject(AmazonRedshiftSource.class); - Assertions.assertEquals("f", model.redshiftUnloadSettings().s3LinkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java deleted file mode 100644 index 5e037661f595..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonRedshiftTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AmazonRedshiftTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRedshiftTableDataset model = BinaryData.fromString( - "{\"type\":\"AmazonRedshiftTable\",\"typeProperties\":{\"tableName\":\"dataync\",\"table\":\"datau\",\"schema\":\"datafstyygjqpulm\"},\"description\":\"gm\",\"structure\":\"dataqmiwxzfvvzucqfg\",\"schema\":\"datajnbxwbmwdukin\",\"linkedServiceName\":{\"referenceName\":\"lxhgdekekzou\",\"parameters\":{\"cgldohgc\":\"datawwpzrd\",\"dqtdnnc\":\"datandxfhhhtes\"}},\"parameters\":{\"dxccyijj\":{\"type\":\"Object\",\"defaultValue\":\"datadshvvf\"},\"ydw\":{\"type\":\"Int\",\"defaultValue\":\"dataijzrqnjxmvvsd\"},\"exqwqnghxnimvy\":{\"type\":\"Object\",\"defaultValue\":\"dataruhhqldrdymnswx\"},\"tnylqu\":{\"type\":\"Object\",\"defaultValue\":\"dataxgunnqgypu\"}},\"annotations\":[\"datamvyumgmmuebsnzn\",\"datagsqufmjxcyo\",\"dataeqcazisvbrqgcy\",\"datapgawepk\"],\"folder\":{\"name\":\"rzp\"},\"\":{\"taflvs\":\"datardtbgblxbuibrvj\"}}") - .toObject(AmazonRedshiftTableDataset.class); - Assertions.assertEquals("gm", model.description()); - Assertions.assertEquals("lxhgdekekzou", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("dxccyijj").type()); - Assertions.assertEquals("rzp", model.folder().name()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRedshiftTableDataset model = new AmazonRedshiftTableDataset().withDescription("gm") - .withStructure("dataqmiwxzfvvzucqfg") - .withSchema("datajnbxwbmwdukin") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lxhgdekekzou") - .withParameters(mapOf("cgldohgc", "datawwpzrd", "dqtdnnc", "datandxfhhhtes"))) - .withParameters(mapOf("dxccyijj", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datadshvvf"), "ydw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataijzrqnjxmvvsd"), - "exqwqnghxnimvy", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataruhhqldrdymnswx"), - "tnylqu", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataxgunnqgypu"))) - .withAnnotations( - Arrays.asList("datamvyumgmmuebsnzn", "datagsqufmjxcyo", "dataeqcazisvbrqgcy", "datapgawepk")) - .withFolder(new DatasetFolder().withName("rzp")) - .withTableName("dataync") - .withTable("datau") - .withSchemaTypePropertiesSchema("datafstyygjqpulm"); - model = BinaryData.fromObject(model).toObject(AmazonRedshiftTableDataset.class); - Assertions.assertEquals("gm", model.description()); - Assertions.assertEquals("lxhgdekekzou", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("dxccyijj").type()); - Assertions.assertEquals("rzp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java deleted file mode 100644 index 07b3b8ee6d7c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonRedshiftTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AmazonRedshiftTableDatasetTypeProperties; - -public final class AmazonRedshiftTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonRedshiftTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datajihvfjcqrttjfuq\",\"table\":\"datafjewfeqbavdo\",\"schema\":\"datawy\"}") - .toObject(AmazonRedshiftTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonRedshiftTableDatasetTypeProperties model - = new AmazonRedshiftTableDatasetTypeProperties().withTableName("datajihvfjcqrttjfuq") - .withTable("datafjewfeqbavdo") - .withSchema("datawy"); - model = BinaryData.fromObject(model).toObject(AmazonRedshiftTableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java deleted file mode 100644 index 5e008fabd973..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleLocationTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonS3CompatibleLocation; - -public final class AmazonS3CompatibleLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonS3CompatibleLocation model = BinaryData.fromString( - "{\"type\":\"AmazonS3CompatibleLocation\",\"bucketName\":\"datahdyswcrptveajc\",\"version\":\"datavlgsrgkrfiz\",\"folderPath\":\"dataywlptyuqh\",\"fileName\":\"datamc\",\"\":{\"npcfyk\":\"datakpfuofix\",\"jp\":\"datapyycpawm\"}}") - .toObject(AmazonS3CompatibleLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonS3CompatibleLocation model = new AmazonS3CompatibleLocation().withFolderPath("dataywlptyuqh") - .withFileName("datamc") - .withBucketName("datahdyswcrptveajc") - .withVersion("datavlgsrgkrfiz"); - model = BinaryData.fromObject(model).toObject(AmazonS3CompatibleLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java deleted file mode 100644 index 71ce885576f1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3CompatibleReadSettingsTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonS3CompatibleReadSettings; - -public final class AmazonS3CompatibleReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonS3CompatibleReadSettings model = BinaryData.fromString( - "{\"type\":\"AmazonS3CompatibleReadSettings\",\"recursive\":\"dataugthcdbz\",\"wildcardFolderPath\":\"datahoiogpb\",\"wildcardFileName\":\"datapbwefoxlzrijpkbr\",\"prefix\":\"datazupq\",\"fileListPath\":\"dataqeqjtzawen\",\"enablePartitionDiscovery\":\"datagitzdjix\",\"partitionRootPath\":\"datapladohnizvvekpq\",\"deleteFilesAfterCompletion\":\"dataqvmhtyafcvtosovx\",\"modifiedDatetimeStart\":\"dataooqekzxqrzsng\",\"modifiedDatetimeEnd\":\"datalbsnlfp\",\"maxConcurrentConnections\":\"datajrh\",\"disableMetricsCollection\":\"datahiorhwu\",\"\":{\"jgtourrqmch\":\"datamyjvko\"}}") - .toObject(AmazonS3CompatibleReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonS3CompatibleReadSettings model - = new AmazonS3CompatibleReadSettings().withMaxConcurrentConnections("datajrh") - .withDisableMetricsCollection("datahiorhwu") - .withRecursive("dataugthcdbz") - .withWildcardFolderPath("datahoiogpb") - .withWildcardFileName("datapbwefoxlzrijpkbr") - .withPrefix("datazupq") - 
.withFileListPath("dataqeqjtzawen") - .withEnablePartitionDiscovery("datagitzdjix") - .withPartitionRootPath("datapladohnizvvekpq") - .withDeleteFilesAfterCompletion("dataqvmhtyafcvtosovx") - .withModifiedDatetimeStart("dataooqekzxqrzsng") - .withModifiedDatetimeEnd("datalbsnlfp"); - model = BinaryData.fromObject(model).toObject(AmazonS3CompatibleReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java deleted file mode 100644 index 47192b8354f5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3LocationTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonS3Location; - -public final class AmazonS3LocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonS3Location model = BinaryData.fromString( - "{\"type\":\"AmazonS3Location\",\"bucketName\":\"dataltqstqkqsygxiyn\",\"version\":\"dataovagzkheuban\",\"folderPath\":\"datau\",\"fileName\":\"dataqcckqiawzlz\",\"\":{\"izuxlrarwpewsaud\":\"dataslga\",\"sx\":\"dataejtig\",\"qbalahovuuwxhme\":\"datapytn\",\"oti\":\"datajnhj\"}}") - .toObject(AmazonS3Location.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonS3Location model = new AmazonS3Location().withFolderPath("datau") - .withFileName("dataqcckqiawzlz") - .withBucketName("dataltqstqkqsygxiyn") - .withVersion("dataovagzkheuban"); - model = BinaryData.fromObject(model).toObject(AmazonS3Location.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java deleted file mode 100644 index 828ddc8c64b4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AmazonS3ReadSettingsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AmazonS3ReadSettings; - -public final class AmazonS3ReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AmazonS3ReadSettings model = BinaryData.fromString( - "{\"type\":\"AmazonS3ReadSettings\",\"recursive\":\"datagnomtkgoy\",\"wildcardFolderPath\":\"dataaarwphbuzmvjiltl\",\"wildcardFileName\":\"datafkctpa\",\"prefix\":\"datahrjwyxduwimwnuvj\",\"fileListPath\":\"datawpfxi\",\"enablePartitionDiscovery\":\"datafkmnj\",\"partitionRootPath\":\"dataswmyjdbcknso\",\"deleteFilesAfterCompletion\":\"datampdkcbp\",\"modifiedDatetimeStart\":\"datagu\",\"modifiedDatetimeEnd\":\"dataaitkcendwg\",\"maxConcurrentConnections\":\"datas\",\"disableMetricsCollection\":\"datanppxxeysqe\",\"\":{\"nysemunvzmv\":\"dataug\",\"sxuuksvfsukpk\":\"databckl\",\"razwkcohjxoc\":\"dataealbckpnnenrcezt\",\"uudr\":\"datamvuamorhkne\"}}") - .toObject(AmazonS3ReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AmazonS3ReadSettings model = new AmazonS3ReadSettings().withMaxConcurrentConnections("datas") - .withDisableMetricsCollection("datanppxxeysqe") - .withRecursive("datagnomtkgoy") - .withWildcardFolderPath("dataaarwphbuzmvjiltl") - .withWildcardFileName("datafkctpa") - .withPrefix("datahrjwyxduwimwnuvj") - .withFileListPath("datawpfxi") - .withEnablePartitionDiscovery("datafkmnj") - .withPartitionRootPath("dataswmyjdbcknso") - .withDeleteFilesAfterCompletion("datampdkcbp") - .withModifiedDatetimeStart("datagu") - .withModifiedDatetimeEnd("dataaitkcendwg"); - model = BinaryData.fromObject(model).toObject(AmazonS3ReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java deleted file mode 100644 index d7e5a2a52f83..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTests.java +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AppendVariableActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AppendVariableActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AppendVariableActivity model = BinaryData.fromString( - 
"{\"type\":\"AppendVariable\",\"typeProperties\":{\"variableName\":\"gm\",\"value\":\"datalqswwdbsvghys\"},\"name\":\"dqrbevobq\",\"description\":\"ngfyjfq\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"yibycoupksa\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"gyjoklngjsglzoir\":\"dataxvffrncswv\",\"pbgak\":\"datasqdnasj\",\"rgye\":\"dataszzbdt\",\"qiot\":\"datavqslikeuq\"}},{\"activity\":\"fcbgffd\",\"dependencyConditions\":[\"Completed\"],\"\":{\"qawtfyzqop\":\"datat\",\"ea\":\"datalixhapvwacwrc\",\"ble\":\"dataucnknzncoxeop\"}},{\"activity\":\"axrsyxeqwgaeice\",\"dependencyConditions\":[\"Completed\"],\"\":{\"cxkywypztssq\":\"dataci\",\"wzwvttkh\":\"dataclaec\",\"qjqjkhqa\":\"dataxqyinfd\"}}],\"userProperties\":[{\"name\":\"czaqgevsnnqvkuf\",\"value\":\"datazwgw\"},{\"name\":\"dv\",\"value\":\"dataskffqqaobbq\"}],\"\":{\"ykhtsycct\":\"datajusqhrvadffdr\",\"siembc\":\"datarvn\",\"ixjkxvz\":\"datatzmldw\",\"orqbmkfo\":\"dataa\"}}") - .toObject(AppendVariableActivity.class); - Assertions.assertEquals("dqrbevobq", model.name()); - Assertions.assertEquals("ngfyjfq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("yibycoupksa", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("czaqgevsnnqvkuf", model.userProperties().get(0).name()); - Assertions.assertEquals("gm", model.variableName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AppendVariableActivity model = new AppendVariableActivity().withName("dqrbevobq") - .withDescription("ngfyjfq") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new 
ActivityDependency().withActivity("yibycoupksa") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("fcbgffd") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("axrsyxeqwgaeice") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("czaqgevsnnqvkuf").withValue("datazwgw"), - new UserProperty().withName("dv").withValue("dataskffqqaobbq"))) - .withVariableName("gm") - .withValue("datalqswwdbsvghys"); - model = BinaryData.fromObject(model).toObject(AppendVariableActivity.class); - Assertions.assertEquals("dqrbevobq", model.name()); - Assertions.assertEquals("ngfyjfq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("yibycoupksa", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("czaqgevsnnqvkuf", model.userProperties().get(0).name()); - Assertions.assertEquals("gm", model.variableName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java deleted file mode 100644 index 30da891b733f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AppendVariableActivityTypePropertiesTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AppendVariableActivityTypeProperties; -import org.junit.jupiter.api.Assertions; - -public final class AppendVariableActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AppendVariableActivityTypeProperties model - = BinaryData.fromString("{\"variableName\":\"aaqfqgmwdohvinvz\",\"value\":\"dataubsaskgi\"}") - .toObject(AppendVariableActivityTypeProperties.class); - Assertions.assertEquals("aaqfqgmwdohvinvz", model.variableName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AppendVariableActivityTypeProperties model - = new AppendVariableActivityTypeProperties().withVariableName("aaqfqgmwdohvinvz").withValue("dataubsaskgi"); - model = BinaryData.fromObject(model).toObject(AppendVariableActivityTypeProperties.class); - Assertions.assertEquals("aaqfqgmwdohvinvz", 
model.variableName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java deleted file mode 100644 index dde4be47b60b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ArmIdWrapperTests.java +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ArmIdWrapper; - -public final class ArmIdWrapperTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ArmIdWrapper model = BinaryData.fromString("{\"id\":\"roznnhdrlktgj\"}").toObject(ArmIdWrapper.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ArmIdWrapper model = new ArmIdWrapper(); - model = BinaryData.fromObject(model).toObject(ArmIdWrapper.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java deleted file mode 100644 index 9df72a8477eb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroFormatTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AvroFormat; - -public final class AvroFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AvroFormat model = BinaryData.fromString( - "{\"type\":\"AvroFormat\",\"serializer\":\"datanck\",\"deserializer\":\"datalblfxlupibaqzizx\",\"\":{\"qiyndveqels\":\"dataweghlwwbogvgfk\",\"ns\":\"datafvdstrkzxsgt\",\"smovpi\":\"datalr\",\"dnox\":\"datay\"}}") - .toObject(AvroFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AvroFormat model = new AvroFormat().withSerializer("datanck").withDeserializer("datalblfxlupibaqzizx"); - model = BinaryData.fromObject(model).toObject(AvroFormat.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java deleted file mode 100644 index 9e2c0e60af4d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSinkTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AvroSink; -import com.azure.resourcemanager.datafactory.models.AvroWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AvroSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AvroSink model = BinaryData.fromString( - "{\"type\":\"AvroSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datahuab\",\"disableMetricsCollection\":\"datafh\",\"copyBehavior\":\"dataoxnpuap\",\"metadata\":[{\"name\":\"dataekiqlscmtcl\",\"value\":\"datapivt\"},{\"name\":\"datavcfch\",\"value\":\"datakcjjnqxjj\"},{\"name\":\"datayajdf\",\"value\":\"dataxjm\"},{\"name\":\"datagfbzbxeqzvokfr\",\"value\":\"dataaf\"}],\"\":{\"vuefsrxqsc\":\"datajucmuaxdu\",\"ksgeqpaillf\":\"databbwejr\"}},\"formatSettings\":{\"type\":\"AvroWriteSettings\",\"recordName\":\"sfmeotvnetee\",\"recordNamespace\":\"dfpflffd\",\"maxRowsPerFile\":\"datanaoehkgpks\",\"fileNamePrefix\":\"databwkwxdgcf\",\"\":{\"jwxhslrbwwk\":\"datayy\"}},\"writeBatchSize\":\"datawodhsodofsxjiky\",\"writeBatchTimeout\":\"dataquhuixqwoggw\",\"sinkRetryCount\":\"datadmxhuw\",\"sinkRetryWait\":\"dataf\",\"maxConcurrentConnections\":\"datakyft\",\"disableMetricsCollection\":\"datakbgsgopyckmncru\",\"\":{\"qkgixfnrneyav\":\"datadjmda\",\"iizbwfjumulhf\":\"datadovpwrqcfzokplz\",\"qcapbkfvowzbk\":\"datadgnchahldnrpt\"}}") - .toObject(AvroSink.class); - Assertions.assertEquals("sfmeotvnetee", model.formatSettings().recordName()); - Assertions.assertEquals("dfpflffd", model.formatSettings().recordNamespace()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws 
Exception { - AvroSink model = new AvroSink().withWriteBatchSize("datawodhsodofsxjiky") - .withWriteBatchTimeout("dataquhuixqwoggw") - .withSinkRetryCount("datadmxhuw") - .withSinkRetryWait("dataf") - .withMaxConcurrentConnections("datakyft") - .withDisableMetricsCollection("datakbgsgopyckmncru") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datahuab") - .withDisableMetricsCollection("datafh") - .withCopyBehavior("dataoxnpuap") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataekiqlscmtcl").withValue("datapivt"), - new MetadataItem().withName("datavcfch").withValue("datakcjjnqxjj"), - new MetadataItem().withName("datayajdf").withValue("dataxjm"), - new MetadataItem().withName("datagfbzbxeqzvokfr").withValue("dataaf"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new AvroWriteSettings().withRecordName("sfmeotvnetee") - .withRecordNamespace("dfpflffd") - .withMaxRowsPerFile("datanaoehkgpks") - .withFileNamePrefix("databwkwxdgcf")); - model = BinaryData.fromObject(model).toObject(AvroSink.class); - Assertions.assertEquals("sfmeotvnetee", model.formatSettings().recordName()); - Assertions.assertEquals("dfpflffd", model.formatSettings().recordNamespace()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java deleted file mode 100644 index 090a56b5c17e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroSourceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AvroSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class AvroSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AvroSource model = BinaryData.fromString( - "{\"type\":\"AvroSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataryu\",\"disableMetricsCollection\":\"dataasnigh\",\"\":{\"pfudzntbzg\":\"datakhiihggzqheittym\",\"kwypbq\":\"datagw\",\"gsbuqdpzhqeczzut\":\"dataxpohcrkmbajyu\"}},\"additionalColumns\":\"dataidsxrexbargbm\",\"sourceRetryCount\":\"dataopypcuom\",\"sourceRetryWait\":\"dataucjznnowpvxuuvhw\",\"maxConcurrentConnections\":\"datame\",\"disableMetricsCollection\":\"datahw\",\"\":{\"hlqtqjabwtkpjy\":\"datakj\"}}") - .toObject(AvroSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AvroSource model = new 
AvroSource().withSourceRetryCount("dataopypcuom") - .withSourceRetryWait("dataucjznnowpvxuuvhw") - .withMaxConcurrentConnections("datame") - .withDisableMetricsCollection("datahw") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataryu") - .withDisableMetricsCollection("dataasnigh") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("dataidsxrexbargbm"); - model = BinaryData.fromObject(model).toObject(AvroSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java deleted file mode 100644 index 00542b58b59b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AvroWriteSettingsTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AvroWriteSettings; -import org.junit.jupiter.api.Assertions; - -public final class AvroWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AvroWriteSettings model = BinaryData.fromString( - "{\"type\":\"AvroWriteSettings\",\"recordName\":\"jqafsxvqq\",\"recordNamespace\":\"zoblxxk\",\"maxRowsPerFile\":\"datautizpv\",\"fileNamePrefix\":\"datahgxsdo\",\"\":{\"rjtu\":\"datafodokhaogewdhln\",\"q\":\"dataarjiriccu\"}}") - .toObject(AvroWriteSettings.class); - Assertions.assertEquals("jqafsxvqq", model.recordName()); - Assertions.assertEquals("zoblxxk", model.recordNamespace()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AvroWriteSettings model = new AvroWriteSettings().withRecordName("jqafsxvqq") - .withRecordNamespace("zoblxxk") - .withMaxRowsPerFile("datautizpv") - .withFileNamePrefix("datahgxsdo"); - model = BinaryData.fromObject(model).toObject(AvroWriteSettings.class); - Assertions.assertEquals("jqafsxvqq", model.recordName()); - Assertions.assertEquals("zoblxxk", model.recordNamespace()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java deleted file mode 100644 index 2369f3bc3c14..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzPowerShellSetup; -import org.junit.jupiter.api.Assertions; - -public final class AzPowerShellSetupTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzPowerShellSetup model - = BinaryData.fromString("{\"type\":\"AzPowerShellSetup\",\"typeProperties\":{\"version\":\"wbnqin\"}}") - .toObject(AzPowerShellSetup.class); - Assertions.assertEquals("wbnqin", model.version()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzPowerShellSetup model = new AzPowerShellSetup().withVersion("wbnqin"); - model = BinaryData.fromObject(model).toObject(AzPowerShellSetup.class); - Assertions.assertEquals("wbnqin", model.version()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java deleted file mode 100644 index f0b3b5425dfa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzPowerShellSetupTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzPowerShellSetupTypeProperties; -import org.junit.jupiter.api.Assertions; - -public final class AzPowerShellSetupTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzPowerShellSetupTypeProperties model - = BinaryData.fromString("{\"version\":\"sx\"}").toObject(AzPowerShellSetupTypeProperties.class); - Assertions.assertEquals("sx", model.version()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzPowerShellSetupTypeProperties model = new AzPowerShellSetupTypeProperties().withVersion("sx"); - model = BinaryData.fromObject(model).toObject(AzPowerShellSetupTypeProperties.class); - Assertions.assertEquals("sx", model.version()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java deleted file mode 100644 index dbbfd9fb35a1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTests.java +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobDataset; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureBlobDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobDataset model = BinaryData.fromString( - "{\"type\":\"AzureBlob\",\"typeProperties\":{\"folderPath\":\"datazwbxgdebxlayuno\",\"tableRootLocation\":\"datarh\",\"fileName\":\"dataabenqlamwmg\",\"modifiedDatetimeStart\":\"dataayxflnbcpjstbh\",\"modifiedDatetimeEnd\":\"datahcucsqsnx\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataufeapdrbzyvb\",\"deserializer\":\"dataadulpodkaxp\",\"\":{\"qltd\":\"datakzhmhj\",\"rkgzkyhudbkuw\":\"dataluqrojadhfztlray\"}},\"compression\":{\"type\":\"dataqxlcweakfecjvx\",\"level\":\"dataqufqizj\",\"\":{\"a\":\"datawo\",\"izxpxhnzlsle\":\"dataeyyjmjj\",\"qpdwad\":\"datacttgzkjt\"}}},\"description\":\"hz\",\"structure\":\"datadsvcz\",\"schema\":\"dataiahuqymjzucw\",\"linkedServiceName\":{\"referenceName\":\"mejjqhddwvmq\",\"parameters\":{\"bn\":\"dataookyfoz\",\"ypfqpgaixw\":\"datax\",\"kwepwo\":\"datagrkkderfrswq\",\"d\":\"dataggicwnxhtfmcqbs\"}},\"parameters\":{\"umjdjxhzghg\":{\"type\":\"Float\",\"defaultValue\":\"datazpgf\"},\"mxjdnaju\":{\"type\":\"Object\",\"defaultValue\":\"dataynrceqavfdbdf\"},\"qmkw\":{\"type\":\"Bool\",\"defaultValue\":\"datay\"}},\"annotation
s\":[\"datacwlo\",\"datadejkluxxrwzobuz\"],\"folder\":{\"name\":\"g\"},\"\":{\"zdprmimrljdpoqfx\":\"datadtkwppth\",\"cloamfmxtllflt\":\"dataemhkftbaewht\",\"nrrfijhggabqb\":\"datamq\"}}") - .toObject(AzureBlobDataset.class); - Assertions.assertEquals("hz", model.description()); - Assertions.assertEquals("mejjqhddwvmq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("umjdjxhzghg").type()); - Assertions.assertEquals("g", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobDataset model = new AzureBlobDataset().withDescription("hz") - .withStructure("datadsvcz") - .withSchema("dataiahuqymjzucw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mejjqhddwvmq") - .withParameters(mapOf("bn", "dataookyfoz", "ypfqpgaixw", "datax", "kwepwo", "datagrkkderfrswq", "d", - "dataggicwnxhtfmcqbs"))) - .withParameters(mapOf("umjdjxhzghg", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datazpgf"), "mxjdnaju", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataynrceqavfdbdf"), - "qmkw", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datay"))) - .withAnnotations(Arrays.asList("datacwlo", "datadejkluxxrwzobuz")) - .withFolder(new DatasetFolder().withName("g")) - .withFolderPath("datazwbxgdebxlayuno") - .withTableRootLocation("datarh") - .withFileName("dataabenqlamwmg") - .withModifiedDatetimeStart("dataayxflnbcpjstbh") - .withModifiedDatetimeEnd("datahcucsqsnx") - .withFormat(new DatasetStorageFormat().withSerializer("dataufeapdrbzyvb") - .withDeserializer("dataadulpodkaxp") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("dataqxlcweakfecjvx") - .withLevel("dataqufqizj") - .withAdditionalProperties(mapOf())); - model = 
BinaryData.fromObject(model).toObject(AzureBlobDataset.class); - Assertions.assertEquals("hz", model.description()); - Assertions.assertEquals("mejjqhddwvmq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("umjdjxhzghg").type()); - Assertions.assertEquals("g", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java deleted file mode 100644 index 2018ea0d964b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobDatasetTypePropertiesTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class AzureBlobDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"datamkli\",\"tableRootLocation\":\"datarwdvffhsdpzou\",\"fileName\":\"datatqrxqwqnjxrdf\",\"modifiedDatetimeStart\":\"datat\",\"modifiedDatetimeEnd\":\"datalygtavczcxdfweap\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datalxrljphraspifl\",\"deserializer\":\"datam\",\"\":{\"ehfgsm\":\"datamdbgi\",\"mtznpaxwfqtyyqi\":\"datarjuqbpxtokl\"}},\"compression\":{\"type\":\"datacltungbso\",\"level\":\"datackmiig\",\"\":{\"kdskswtiiqqc\":\"datauck\",\"lnssw\":\"dataikclsm\",\"ru\":\"dataykdnonaaxwmg\",\"iqbcbgv\":\"datalqcwnynll\"}}}") - .toObject(AzureBlobDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobDatasetTypeProperties model = new AzureBlobDatasetTypeProperties().withFolderPath("datamkli") - .withTableRootLocation("datarwdvffhsdpzou") - .withFileName("datatqrxqwqnjxrdf") - .withModifiedDatetimeStart("datat") - .withModifiedDatetimeEnd("datalygtavczcxdfweap") - .withFormat(new DatasetStorageFormat().withSerializer("datalxrljphraspifl") - .withDeserializer("datam") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datacltungbso") - .withLevel("datackmiig") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(AzureBlobDatasetTypeProperties.class); - } - - // Use 
"Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java deleted file mode 100644 index ed42b1b26015..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTests.java +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSDataset; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureBlobFSDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureBlobFSFile\",\"typeProperties\":{\"folderPath\":\"datamdrcjlvkrkegtyc\",\"fileName\":\"datappiyxlzmiyddeeqz\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"databm\",\"deserializer\":\"datas\",\"\":{\"trlq\":\"datauetbapfczew\",\"pirgzxvbczwhy\":\"datapxyazk\"}},\"compression\":{\"type\":\"databthmsritj\",\"level\":\"dataiggrunoz\",\"\":{\"krfihscjv\":\"dataaljthoivsdwsn\"}}},\"description\":\"mhzbhwahfbwih\",\"structure\":\"datanx\",\"schema\":\"datavynuqqkotauratn\",\"linkedServiceName\":{\"referenceName\":\"cppfzsclefyrle\",\"parameters\":{\"n\":\"dataqlmfdg\",\"fleev\":\"databuypwovvv\",\"pxrl\":\"dataoqayrehjuqwv\"}},\"parameters\":{\"qzlxn\":{\"type\":\"Object\",\"defaultValue\":\"datahacen\"}},\"annotations\":[\"databfonfdbgmkfwmj\",\"datawtewf\",\"dataxwyrkbrehzlr\"],\"folder\":{\"name\":\"pcha\"},\"\":{\"rieikmwlaoklfnis\":\"dataeplrajubowuywevt\"}}") - .toObject(AzureBlobFSDataset.class); - Assertions.assertEquals("mhzbhwahfbwih", model.description()); - Assertions.assertEquals("cppfzsclefyrle", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("qzlxn").type()); - Assertions.assertEquals("pcha", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSDataset model = new AzureBlobFSDataset().withDescription("mhzbhwahfbwih") - .withStructure("datanx") - .withSchema("datavynuqqkotauratn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cppfzsclefyrle") - .withParameters(mapOf("n", "dataqlmfdg", "fleev", "databuypwovvv", "pxrl", "dataoqayrehjuqwv"))) - .withParameters(mapOf("qzlxn", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datahacen"))) - .withAnnotations(Arrays.asList("databfonfdbgmkfwmj", "datawtewf", "dataxwyrkbrehzlr")) - .withFolder(new DatasetFolder().withName("pcha")) - .withFolderPath("datamdrcjlvkrkegtyc") - 
.withFileName("datappiyxlzmiyddeeqz") - .withFormat(new DatasetStorageFormat().withSerializer("databm") - .withDeserializer("datas") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("databthmsritj") - .withLevel("dataiggrunoz") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(AzureBlobFSDataset.class); - Assertions.assertEquals("mhzbhwahfbwih", model.description()); - Assertions.assertEquals("cppfzsclefyrle", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("qzlxn").type()); - Assertions.assertEquals("pcha", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java deleted file mode 100644 index a2119460d973..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSDatasetTypePropertiesTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureBlobFSDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class AzureBlobFSDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"datagucbmt\",\"fileName\":\"datadscnns\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datayyuvtzrxzhclec\",\"deserializer\":\"datatzqzcloy\",\"\":{\"wbyorjplbchych\":\"datapgidhzgyresgzsdt\"}},\"compression\":{\"type\":\"datavyrfbqvum\",\"level\":\"dataqj\",\"\":{\"t\":\"dataepmaxfnzlpqmp\"}}}") - .toObject(AzureBlobFSDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSDatasetTypeProperties model = new AzureBlobFSDatasetTypeProperties().withFolderPath("datagucbmt") - .withFileName("datadscnns") - .withFormat(new DatasetStorageFormat().withSerializer("datayyuvtzrxzhclec") - .withDeserializer("datatzqzcloy") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datavyrfbqvum") - .withLevel("dataqj") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(AzureBlobFSDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java deleted file mode 100644 index ab509b27b0b3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSLocationTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSLocation; - -public final class AzureBlobFSLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSLocation model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSLocation\",\"fileSystem\":\"dataixwfgcdi\",\"folderPath\":\"datakcxwnujvqynvavi\",\"fileName\":\"datadmuqohhi\",\"\":{\"cljbrhlhpvzad\":\"dataxquddrw\"}}") - .toObject(AzureBlobFSLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSLocation model = new AzureBlobFSLocation().withFolderPath("datakcxwnujvqynvavi") - .withFileName("datadmuqohhi") - .withFileSystem("dataixwfgcdi"); - model = BinaryData.fromObject(model).toObject(AzureBlobFSLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java deleted file mode 100644 index 26c0116f0e3c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSReadSettingsTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSReadSettings; - -public final class AzureBlobFSReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSReadSettings\",\"recursive\":\"datagro\",\"wildcardFolderPath\":\"dataxsqdzyyalrx\",\"wildcardFileName\":\"dataqpqfw\",\"fileListPath\":\"datalfjkbaxv\",\"enablePartitionDiscovery\":\"dataeimuipggt\",\"partitionRootPath\":\"dataxbfhb\",\"deleteFilesAfterCompletion\":\"dataldziph\",\"modifiedDatetimeStart\":\"datafefznxcleyamv\",\"modifiedDatetimeEnd\":\"datatjjhqvypqgncgw\",\"maxConcurrentConnections\":\"databtkafcnfit\",\"disableMetricsCollection\":\"datack\",\"\":{\"goudcl\":\"datawyjdvyxxbawjgy\",\"disjeovgc\":\"dataidqlynx\"}}") - .toObject(AzureBlobFSReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSReadSettings model = new AzureBlobFSReadSettings().withMaxConcurrentConnections("databtkafcnfit") - .withDisableMetricsCollection("datack") - .withRecursive("datagro") - .withWildcardFolderPath("dataxsqdzyyalrx") - .withWildcardFileName("dataqpqfw") - .withFileListPath("datalfjkbaxv") - .withEnablePartitionDiscovery("dataeimuipggt") - .withPartitionRootPath("dataxbfhb") 
- .withDeleteFilesAfterCompletion("dataldziph") - .withModifiedDatetimeStart("datafefznxcleyamv") - .withModifiedDatetimeEnd("datatjjhqvypqgncgw"); - model = BinaryData.fromObject(model).toObject(AzureBlobFSReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java deleted file mode 100644 index b770530b405f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSinkTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSSink; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class AzureBlobFSSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSSink model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSSink\",\"copyBehavior\":\"datat\",\"metadata\":[{\"name\":\"datawzjycg\",\"value\":\"dataeagxikzvnghtknrw\"},{\"name\":\"dataysnmy\",\"value\":\"datafmlcnrapxw\"},{\"name\":\"datapxoelfobehr\",\"value\":\"dataglojjcziytf\"}],\"writeBatchSize\":\"datavirmbr\",\"writeBatchTimeout\":\"datagnqa\",\"sinkRetryCount\":\"datak\",\"sinkRetryWait\":\"datafyb\",\"maxConcurrentConnections\":\"datadzvuhw\",\"disableMetricsCollection\":\"datanazjvyiiezdnez\",\"\":{\"rm\":\"datazd\"}}") - .toObject(AzureBlobFSSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSSink 
model = new AzureBlobFSSink().withWriteBatchSize("datavirmbr") - .withWriteBatchTimeout("datagnqa") - .withSinkRetryCount("datak") - .withSinkRetryWait("datafyb") - .withMaxConcurrentConnections("datadzvuhw") - .withDisableMetricsCollection("datanazjvyiiezdnez") - .withCopyBehavior("datat") - .withMetadata(Arrays.asList(new MetadataItem().withName("datawzjycg").withValue("dataeagxikzvnghtknrw"), - new MetadataItem().withName("dataysnmy").withValue("datafmlcnrapxw"), - new MetadataItem().withName("datapxoelfobehr").withValue("dataglojjcziytf"))); - model = BinaryData.fromObject(model).toObject(AzureBlobFSSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java deleted file mode 100644 index 02eb310ca9ad..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSSource; - -public final class AzureBlobFSSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSSource model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSSource\",\"treatEmptyAsNull\":\"dataofmrph\",\"skipHeaderLineCount\":\"datas\",\"recursive\":\"dataunkcgdnhacex\",\"sourceRetryCount\":\"dataomrrjooepfb\",\"sourceRetryWait\":\"databffxansgntjmnl\",\"maxConcurrentConnections\":\"datalrjdkyp\",\"disableMetricsCollection\":\"datavilgn\",\"\":{\"akuzezwnqhcpk\":\"datajbldgikokjwge\",\"qcutk\":\"datagsyszdtgw\",\"jtoypluxvjutckfh\":\"dataprourtmccd\",\"zvujp\":\"datadcvlbxr\"}}") - .toObject(AzureBlobFSSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSSource model = new AzureBlobFSSource().withSourceRetryCount("dataomrrjooepfb") - .withSourceRetryWait("databffxansgntjmnl") - .withMaxConcurrentConnections("datalrjdkyp") - .withDisableMetricsCollection("datavilgn") - .withTreatEmptyAsNull("dataofmrph") - .withSkipHeaderLineCount("datas") - .withRecursive("dataunkcgdnhacex"); - model = BinaryData.fromObject(model).toObject(AzureBlobFSSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java deleted file mode 100644 index c1cdb156ff5e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobFSWriteSettingsTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobFSWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class AzureBlobFSWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobFSWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobFSWriteSettings\",\"blockSizeInMB\":\"dataibnoebffkgfi\",\"maxConcurrentConnections\":\"datalr\",\"disableMetricsCollection\":\"datawaapzrkhpty\",\"copyBehavior\":\"dataiqeoajnaotavwmr\",\"metadata\":[{\"name\":\"datagdfecju\",\"value\":\"datayrwvo\"},{\"name\":\"datamcwxbac\",\"value\":\"datapslfqgfwonbtgp\"},{\"name\":\"datazuzxoeouf\",\"value\":\"datajwgfhd\"}],\"\":{\"jg\":\"datascwlcfcpz\",\"osewfbllegezvwu\":\"databbfgmeqhtngrxfq\"}}") - .toObject(AzureBlobFSWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobFSWriteSettings model = new AzureBlobFSWriteSettings().withMaxConcurrentConnections("datalr") - .withDisableMetricsCollection("datawaapzrkhpty") - .withCopyBehavior("dataiqeoajnaotavwmr") - .withMetadata(Arrays.asList(new MetadataItem().withName("datagdfecju").withValue("datayrwvo"), - new MetadataItem().withName("datamcwxbac").withValue("datapslfqgfwonbtgp"), - new MetadataItem().withName("datazuzxoeouf").withValue("datajwgfhd"))) - .withBlockSizeInMB("dataibnoebffkgfi"); - model = BinaryData.fromObject(model).toObject(AzureBlobFSWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java deleted file mode 100644 index 6235af86d81f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageLocationTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLocation; - -public final class AzureBlobStorageLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobStorageLocation model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageLocation\",\"container\":\"datai\",\"folderPath\":\"datauktirzkaugpu\",\"fileName\":\"dataocf\",\"\":{\"rjlvzklkvbgi\":\"datawgofmhx\",\"shjvpzaptuoskaoi\":\"datayjtkakvlb\"}}") - .toObject(AzureBlobStorageLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobStorageLocation model = new AzureBlobStorageLocation().withFolderPath("datauktirzkaugpu") - .withFileName("dataocf") - .withContainer("datai"); - model = BinaryData.fromObject(model).toObject(AzureBlobStorageLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java deleted file mode 100644 index 637025a89832..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageReadSettingsTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobStorageReadSettings; - -public final class AzureBlobStorageReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageReadSettings\",\"recursive\":\"datayxhcw\",\"wildcardFolderPath\":\"datat\",\"wildcardFileName\":\"dataoachoji\",\"prefix\":\"dataaqqfrgiplxr\",\"fileListPath\":\"databsbkvfk\",\"enablePartitionDiscovery\":\"databbpoijs\",\"partitionRootPath\":\"dataru\",\"deleteFilesAfterCompletion\":\"dataituufknarsc\",\"modifiedDatetimeStart\":\"dataktpj\",\"modifiedDatetimeEnd\":\"datazos\",\"maxConcurrentConnections\":\"datasmvnnjwnwo\",\"disableMetricsCollection\":\"dataezwwqchxowpp\",\"\":{\"ehnqirrgnvuo\":\"dataqactoq\",\"eq\":\"datavptvnlbkizebbrwl\",\"bwuamo\":\"dataisoiqgetzgwywy\",\"chengmxpjkuq\":\"dataopqqiyjrehe\"}}") - .toObject(AzureBlobStorageReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobStorageReadSettings model - = new AzureBlobStorageReadSettings().withMaxConcurrentConnections("datasmvnnjwnwo") - .withDisableMetricsCollection("dataezwwqchxowpp") - .withRecursive("datayxhcw") - .withWildcardFolderPath("datat") - .withWildcardFileName("dataoachoji") - .withPrefix("dataaqqfrgiplxr") - .withFileListPath("databsbkvfk") - .withEnablePartitionDiscovery("databbpoijs") - .withPartitionRootPath("dataru") - .withDeleteFilesAfterCompletion("dataituufknarsc") 
- .withModifiedDatetimeStart("dataktpj") - .withModifiedDatetimeEnd("datazos"); - model = BinaryData.fromObject(model).toObject(AzureBlobStorageReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java deleted file mode 100644 index a60438ac0daf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureBlobStorageWriteSettingsTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureBlobStorageWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class AzureBlobStorageWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureBlobStorageWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureBlobStorageWriteSettings\",\"blockSizeInMB\":\"datakrgryfnbxwxlh\",\"maxConcurrentConnections\":\"datazhxothfyifjufzlg\",\"disableMetricsCollection\":\"datasmt\",\"copyBehavior\":\"datamnuzyyc\",\"metadata\":[{\"name\":\"datarywdezpry\",\"value\":\"dataxjgyamochpwwyk\"}],\"\":{\"khmoudmcad\":\"datajkztdckwqv\",\"az\":\"dataojfuvmjtx\"}}") - .toObject(AzureBlobStorageWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureBlobStorageWriteSettings model - = new 
AzureBlobStorageWriteSettings().withMaxConcurrentConnections("datazhxothfyifjufzlg") - .withDisableMetricsCollection("datasmt") - .withCopyBehavior("datamnuzyyc") - .withMetadata( - Arrays.asList(new MetadataItem().withName("datarywdezpry").withValue("dataxjgyamochpwwyk"))) - .withBlockSizeInMB("datakrgryfnbxwxlh"); - model = BinaryData.fromObject(model).toObject(AzureBlobStorageWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java deleted file mode 100644 index caca7fcd8e5c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTests.java +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AzureDataExplorerCommandActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureDataExplorerCommandActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerCommandActivity model = BinaryData.fromString( - 
"{\"type\":\"AzureDataExplorerCommand\",\"typeProperties\":{\"command\":\"datajnatbecuhwcakke\",\"commandTimeout\":\"datazaousjl\"},\"linkedServiceName\":{\"referenceName\":\"jj\",\"parameters\":{\"d\":\"datazcknpmevahq\"}},\"policy\":{\"timeout\":\"dataoldwahe\",\"retry\":\"datadicxolmm\",\"retryIntervalInSeconds\":120968442,\"secureInput\":false,\"secureOutput\":true,\"\":{\"qvmp\":\"databm\",\"ripopzydespwwkd\":\"datacjvog\",\"jq\":\"datasnezdu\",\"bqmelmqkbepie\":\"datahrgwy\"}},\"name\":\"sveaerg\",\"description\":\"brnlbfnuppwqks\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jboyggrmzt\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Skipped\"],\"\":{\"gmtgoeayhojdgwez\":\"datawstabgejopv\"}},{\"activity\":\"r\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"gkfx\":\"datazcfdpxbw\",\"yevvuddnwj\":\"datadtj\",\"flemxbmaiiv\":\"databc\"}},{\"activity\":\"pftzbkev\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"dyl\":\"datapumpqlugz\",\"dmfo\":\"datarsis\",\"svfnxxkmrfz\":\"datai\",\"azrfhfjwikvakj\":\"datatkprbm\"}}],\"userProperties\":[{\"name\":\"hqilvajc\",\"value\":\"datapwlf\"},{\"name\":\"fardjqwdrooooobs\",\"value\":\"datadqv\"},{\"name\":\"qcme\",\"value\":\"datawajjzxcqnl\"}],\"\":{\"xau\":\"datawocwmadyelwol\"}}") - .toObject(AzureDataExplorerCommandActivity.class); - Assertions.assertEquals("sveaerg", model.name()); - Assertions.assertEquals("brnlbfnuppwqks", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("jboyggrmzt", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("hqilvajc", model.userProperties().get(0).name()); - Assertions.assertEquals("jj", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(120968442, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerCommandActivity model = new AzureDataExplorerCommandActivity().withName("sveaerg") - .withDescription("brnlbfnuppwqks") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("jboyggrmzt") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("r") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("pftzbkev") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("hqilvajc").withValue("datapwlf"), - new UserProperty().withName("fardjqwdrooooobs").withValue("datadqv"), - new UserProperty().withName("qcme").withValue("datawajjzxcqnl"))) - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("jj").withParameters(mapOf("d", "datazcknpmevahq"))) - .withPolicy(new ActivityPolicy().withTimeout("dataoldwahe") - .withRetry("datadicxolmm") - .withRetryIntervalInSeconds(120968442) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withCommand("datajnatbecuhwcakke") - .withCommandTimeout("datazaousjl"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerCommandActivity.class); - Assertions.assertEquals("sveaerg", model.name()); - 
Assertions.assertEquals("brnlbfnuppwqks", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("jboyggrmzt", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("hqilvajc", model.userProperties().get(0).name()); - Assertions.assertEquals("jj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(120968442, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java deleted file mode 100644 index 2c5c85e9ed8c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerCommandActivityTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerCommandActivityTypeProperties; - -public final class AzureDataExplorerCommandActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerCommandActivityTypeProperties model - = BinaryData.fromString("{\"command\":\"dataanfjrdcawazqlda\",\"commandTimeout\":\"dataijcx\"}") - .toObject(AzureDataExplorerCommandActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerCommandActivityTypeProperties model - = new AzureDataExplorerCommandActivityTypeProperties().withCommand("dataanfjrdcawazqlda") - .withCommandTimeout("dataijcx"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerCommandActivityTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java deleted file mode 100644 index b42bdf93627d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureDataExplorerDatasetTypeProperties; - -public final class AzureDataExplorerDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerDatasetTypeProperties model = BinaryData.fromString("{\"table\":\"datahuecxhgs\"}") - .toObject(AzureDataExplorerDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerDatasetTypeProperties model - = new AzureDataExplorerDatasetTypeProperties().withTable("datahuecxhgs"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java deleted file mode 100644 index bdde9b6bdcdd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSinkTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataExplorerSink; - -public final class AzureDataExplorerSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerSink model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerSink\",\"ingestionMappingName\":\"dataik\",\"ingestionMappingAsJson\":\"datatrdbtrkvluu\",\"flushImmediately\":\"datasu\",\"writeBatchSize\":\"datafrwmct\",\"writeBatchTimeout\":\"datagjqcksfbkr\",\"sinkRetryCount\":\"dataulpy\",\"sinkRetryWait\":\"dataqsiniej\",\"maxConcurrentConnections\":\"datavvvtxkwrvtl\",\"disableMetricsCollection\":\"dataukbdtmr\",\"\":{\"f\":\"datau\",\"ui\":\"datavbpvizuu\"}}") - .toObject(AzureDataExplorerSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerSink model = new AzureDataExplorerSink().withWriteBatchSize("datafrwmct") - .withWriteBatchTimeout("datagjqcksfbkr") - .withSinkRetryCount("dataulpy") - .withSinkRetryWait("dataqsiniej") - .withMaxConcurrentConnections("datavvvtxkwrvtl") - .withDisableMetricsCollection("dataukbdtmr") - .withIngestionMappingName("dataik") - .withIngestionMappingAsJson("datatrdbtrkvluu") - .withFlushImmediately("datasu"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java deleted file mode 100644 index 8b4c41e6630c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// 
Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataExplorerSource; - -public final class AzureDataExplorerSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerSource model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerSource\",\"query\":\"datagvtzdxtwyxpkwwdk\",\"noTruncation\":\"datadevdvke\",\"queryTimeout\":\"dataxjchd\",\"additionalColumns\":\"dataxeiluexvmlg\",\"sourceRetryCount\":\"datapsqpfxjwta\",\"sourceRetryWait\":\"dataqkguchdyxrjjdj\",\"maxConcurrentConnections\":\"dataiqtz\",\"disableMetricsCollection\":\"datagddkujvqzcuqculw\",\"\":{\"e\":\"datayyqtjcrpaxwxlfx\"}}") - .toObject(AzureDataExplorerSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerSource model = new AzureDataExplorerSource().withSourceRetryCount("datapsqpfxjwta") - .withSourceRetryWait("dataqkguchdyxrjjdj") - .withMaxConcurrentConnections("dataiqtz") - .withDisableMetricsCollection("datagddkujvqzcuqculw") - .withQuery("datagvtzdxtwyxpkwwdk") - .withNoTruncation("datadevdvke") - .withQueryTimeout("dataxjchd") - .withAdditionalColumns("dataxeiluexvmlg"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java deleted file mode 100644 index 6de158e5994d..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataExplorerTableDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataExplorerTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureDataExplorerTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataExplorerTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureDataExplorerTable\",\"typeProperties\":{\"table\":\"datagjndkvzmxl\"},\"description\":\"qgdodn\",\"structure\":\"datayipgkmjt\",\"schema\":\"datazmdzesimeft\",\"linkedServiceName\":{\"referenceName\":\"drfzjlflzagvdava\",\"parameters\":{\"bidaeb\":\"dataefcor\",\"e\":\"datazni\",\"ws\":\"datadfajwiylciobbp\"}},\"parameters\":{\"nteevfgaxfez\":{\"type\":\"String\",\"defaultValue\":\"datauecuuuexs\"}},\"annotations\":[\"datasddkodkgxq\",\"datakyrxgmzzeglwd\"],\"folder\":{\"name\":\"spfegaoksdd\"},\"\":{\"lxkelmzpyq\":\"dataajfyu\"}}") - .toObject(AzureDataExplorerTableDataset.class); - Assertions.assertEquals("qgdodn", model.description()); - Assertions.assertEquals("drfzjlflzagvdava", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nteevfgaxfez").type()); - 
Assertions.assertEquals("spfegaoksdd", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataExplorerTableDataset model = new AzureDataExplorerTableDataset().withDescription("qgdodn") - .withStructure("datayipgkmjt") - .withSchema("datazmdzesimeft") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("drfzjlflzagvdava") - .withParameters(mapOf("bidaeb", "dataefcor", "e", "datazni", "ws", "datadfajwiylciobbp"))) - .withParameters(mapOf("nteevfgaxfez", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datauecuuuexs"))) - .withAnnotations(Arrays.asList("datasddkodkgxq", "datakyrxgmzzeglwd")) - .withFolder(new DatasetFolder().withName("spfegaoksdd")) - .withTable("datagjndkvzmxl"); - model = BinaryData.fromObject(model).toObject(AzureDataExplorerTableDataset.class); - Assertions.assertEquals("qgdodn", model.description()); - Assertions.assertEquals("drfzjlflzagvdava", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nteevfgaxfez").type()); - Assertions.assertEquals("spfegaoksdd", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java deleted file mode 100644 index dea7f811efd4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTests.java +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreDataset; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureDataLakeStoreDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureDataLakeStoreFile\",\"typeProperties\":{\"folderPath\":\"dataxessmvrk\",\"fileName\":\"datasqeq\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataxm\",\"deserializer\":\"dataesbrujbjpppktl\",\"\":{\"hieatnejr\":\"databfm\"}},\"compression\":{\"type\":\"datainzqplgtkiho\",\"level\":\"datakzsrz\",\"\":{\"sckhbmc\":\"datajilzfbpntogke\"}}},\"description\":\"moyfxx\",\"structure\":\"dataykuqdnd\",\"schema\":\"dataykhkg\",\"linkedServiceName\":{\"referenceName\":\"apvd\",\"parameters\":{\"rnrnjrcufmbgacnr\":\"datapeerscd\",\"eubkqiqmlf\":\"datafdtncmspsanma\",\"skkqjmxptuei\":\"datahlq\"}},\"parameters\":{\"mrpahuuonj\":{\"type\":\"Object\",\"defaultValue\":\"datawdr\"}},\"annotations\":[\"dataukguehvvp\",\"datajoegcjojlleuidp\"],\"folder\":{\"name\":\"nvhgnhtmeplh\"},\"\":{\"ummmkvavucgji\":\"databap\"}}") - .toObject(AzureDataLakeStoreDataset.class); - Assertions.assertEquals("moyfxx", model.description()); - Assertions.assertEquals("apvd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("mrpahuuonj").type()); - Assertions.assertEquals("nvhgnhtmeplh", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreDataset model = new AzureDataLakeStoreDataset().withDescription("moyfxx") - .withStructure("dataykuqdnd") - .withSchema("dataykhkg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("apvd") - .withParameters(mapOf("rnrnjrcufmbgacnr", "datapeerscd", "eubkqiqmlf", "datafdtncmspsanma", - "skkqjmxptuei", "datahlq"))) - .withParameters(mapOf("mrpahuuonj", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawdr"))) - .withAnnotations(Arrays.asList("dataukguehvvp", "datajoegcjojlleuidp")) - .withFolder(new DatasetFolder().withName("nvhgnhtmeplh")) - .withFolderPath("dataxessmvrk") - .withFileName("datasqeq") - .withFormat(new 
DatasetStorageFormat().withSerializer("dataxm") - .withDeserializer("dataesbrujbjpppktl") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datainzqplgtkiho") - .withLevel("datakzsrz") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreDataset.class); - Assertions.assertEquals("moyfxx", model.description()); - Assertions.assertEquals("apvd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("mrpahuuonj").type()); - Assertions.assertEquals("nvhgnhtmeplh", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java deleted file mode 100644 index 0bd65f7a8098..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreDatasetTypePropertiesTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureDataLakeStoreDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class AzureDataLakeStoreDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreDatasetTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"dataiqpmnufz\",\"fileName\":\"datachxwwuzdmh\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datavivjm\",\"deserializer\":\"datalitqdsjipdvi\",\"\":{\"qoemwsi\":\"datatyxbriifefndslv\",\"deotmfx\":\"dataeailwdqmqf\"}},\"compression\":{\"type\":\"datad\",\"level\":\"datawgnamkuuy\",\"\":{\"hdcfm\":\"datauafixlxicwgp\",\"fasfod\":\"dataz\",\"kwgqrn\":\"dataopalvngtwyu\"}}}") - .toObject(AzureDataLakeStoreDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreDatasetTypeProperties model - = new AzureDataLakeStoreDatasetTypeProperties().withFolderPath("dataiqpmnufz") - .withFileName("datachxwwuzdmh") - .withFormat(new DatasetStorageFormat().withSerializer("datavivjm") - .withDeserializer("datalitqdsjipdvi") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datad") - .withLevel("datawgnamkuuy") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java deleted file mode 100644 index 827e14ae28a2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreLocationTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreLocation; - -public final class AzureDataLakeStoreLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreLocation model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreLocation\",\"folderPath\":\"dataenninafhxrzfrm\",\"fileName\":\"datatiucwv\",\"\":{\"rcqxgcbvzarmqc\":\"datalukh\",\"stsinvag\":\"datapo\",\"hqwhitxnmxgnmguz\":\"datavjyhdrxbrdvc\",\"kbd\":\"datauwvorbalkjn\"}}") - .toObject(AzureDataLakeStoreLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreLocation model - = new AzureDataLakeStoreLocation().withFolderPath("dataenninafhxrzfrm").withFileName("datatiucwv"); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreLocation.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java deleted file mode 100644 index 9e05be98bbee..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreReadSettingsTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreReadSettings; - -public final class AzureDataLakeStoreReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreReadSettings\",\"recursive\":\"datachgjonrhdib\",\"wildcardFolderPath\":\"datapd\",\"wildcardFileName\":\"datatmeou\",\"fileListPath\":\"datavkcnqonjbbhw\",\"listAfter\":\"datallzykzpjjdsl\",\"listBefore\":\"datayejsgbpjjnblbkak\",\"enablePartitionDiscovery\":\"datavwbd\",\"partitionRootPath\":\"dataddctkjc\",\"deleteFilesAfterCompletion\":\"dataxdir\",\"modifiedDatetimeStart\":\"dataqilsbabqtjch\",\"modifiedDatetimeEnd\":\"datafwe\",\"maxConcurrentConnections\":\"dataeiyem\",\"disableMetricsCollection\":\"datapszekdqqwcspf\",\"\":{\"oslqgsdqnqqzq\":\"datadqym\"}}") - .toObject(AzureDataLakeStoreReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreReadSettings model - = new AzureDataLakeStoreReadSettings().withMaxConcurrentConnections("dataeiyem") - 
.withDisableMetricsCollection("datapszekdqqwcspf") - .withRecursive("datachgjonrhdib") - .withWildcardFolderPath("datapd") - .withWildcardFileName("datatmeou") - .withFileListPath("datavkcnqonjbbhw") - .withListAfter("datallzykzpjjdsl") - .withListBefore("datayejsgbpjjnblbkak") - .withEnablePartitionDiscovery("datavwbd") - .withPartitionRootPath("dataddctkjc") - .withDeleteFilesAfterCompletion("dataxdir") - .withModifiedDatetimeStart("dataqilsbabqtjch") - .withModifiedDatetimeEnd("datafwe"); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java deleted file mode 100644 index d30b1dba9b4c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSinkTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreSink; - -public final class AzureDataLakeStoreSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreSink model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreSink\",\"copyBehavior\":\"databwarljplk\",\"enableAdlsSingleFileParallel\":\"datamxcdreqaqvspsy\",\"writeBatchSize\":\"dataarwtkrbscw\",\"writeBatchTimeout\":\"datawvwmcrhyo\",\"sinkRetryCount\":\"datatplmy\",\"sinkRetryWait\":\"datahvyj\",\"maxConcurrentConnections\":\"dataerh\",\"disableMetricsCollection\":\"datastiawywppq\",\"\":{\"bwsftytpjmufd\":\"dataxbdyczplmljcis\",\"ympsxmoad\":\"datanhqlzantahuykxs\",\"jvlzqsyvr\":\"dataiqnsmpfe\",\"bobx\":\"dataoleqikcorkem\"}}") - .toObject(AzureDataLakeStoreSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreSink model = new AzureDataLakeStoreSink().withWriteBatchSize("dataarwtkrbscw") - .withWriteBatchTimeout("datawvwmcrhyo") - .withSinkRetryCount("datatplmy") - .withSinkRetryWait("datahvyj") - .withMaxConcurrentConnections("dataerh") - .withDisableMetricsCollection("datastiawywppq") - .withCopyBehavior("databwarljplk") - .withEnableAdlsSingleFileParallel("datamxcdreqaqvspsy"); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java deleted file mode 100644 index aa50732af8f2..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreSourceTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreSource; - -public final class AzureDataLakeStoreSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreSource model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreSource\",\"recursive\":\"datamtuxw\",\"sourceRetryCount\":\"dataazebifktnxu\",\"sourceRetryWait\":\"dataorbwyeyrn\",\"maxConcurrentConnections\":\"databyabtowbuuhlw\",\"disableMetricsCollection\":\"datavzuxfsmfpd\",\"\":{\"pljzrqwjtswemot\":\"dataqpzwfvnoylmfjylh\",\"jvctqaqczrwpae\":\"datakejyavk\",\"dsng\":\"dataklxsvcbrszltvm\"}}") - .toObject(AzureDataLakeStoreSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreSource model = new AzureDataLakeStoreSource().withSourceRetryCount("dataazebifktnxu") - .withSourceRetryWait("dataorbwyeyrn") - .withMaxConcurrentConnections("databyabtowbuuhlw") - .withDisableMetricsCollection("datavzuxfsmfpd") - .withRecursive("datamtuxw"); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java deleted file mode 100644 index d8edd455c874..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDataLakeStoreWriteSettingsTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDataLakeStoreWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class AzureDataLakeStoreWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDataLakeStoreWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureDataLakeStoreWriteSettings\",\"expiryDateTime\":\"dataismsggja\",\"maxConcurrentConnections\":\"dataazilqgvmiproqpyb\",\"disableMetricsCollection\":\"datakmdklwbqkmt\",\"copyBehavior\":\"dataaqqqtmpgr\",\"metadata\":[{\"name\":\"datatwedllpnoeb\",\"value\":\"datahyiohcj\"},{\"name\":\"dataduoggx\",\"value\":\"datam\"},{\"name\":\"dataguykrpzpmwz\",\"value\":\"datacrzdwebs\"},{\"name\":\"datapk\",\"value\":\"dataraidjeddnqm\"}],\"\":{\"xtnuc\":\"datahrhcfeqjkac\",\"hvfdosq\":\"dataojqoxpw\",\"cptvkbcykntdzze\":\"datadoyqbpzxushmltih\",\"e\":\"datarzpggs\"}}") - .toObject(AzureDataLakeStoreWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDataLakeStoreWriteSettings model - = new AzureDataLakeStoreWriteSettings().withMaxConcurrentConnections("dataazilqgvmiproqpyb") - .withDisableMetricsCollection("datakmdklwbqkmt") - .withCopyBehavior("dataaqqqtmpgr") - .withMetadata(Arrays.asList(new MetadataItem().withName("datatwedllpnoeb").withValue("datahyiohcj"), - new MetadataItem().withName("dataduoggx").withValue("datam"), - new 
MetadataItem().withName("dataguykrpzpmwz").withValue("datacrzdwebs"), - new MetadataItem().withName("datapk").withValue("dataraidjeddnqm"))) - .withExpiryDateTime("dataismsggja"); - model = BinaryData.fromObject(model).toObject(AzureDataLakeStoreWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java deleted file mode 100644 index 5bccbd845651..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureDatabricksDeltaLakeDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeDataset model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeDataset\",\"typeProperties\":{\"table\":\"datawisp\",\"database\":\"datagdblwj\"},\"description\":\"aqxaxtuxi\",\"structure\":\"datapbiichlygkv\",\"schema\":\"dataxwonkrnizdxywab\",\"linkedServiceName\":{\"referenceName\":\"itn\",\"parameters\":{\"nnewltonopffem\":\"dataptgvnaqyjukka\",\"qnxyd\":\"datawfhhawbabhzbfcdi\",\"zsuspaywvslq\":\"datazfoi\",\"ytqavouymkdeu\":\"dataronzeafkxfmuwdb\"}},\"parameters\":{\"tth\":{\"type\":\"Float\",\"defaultValue\":\"datapfdkaxgbiwpgopql\"}},\"annotations\":[\"datarmt\",\"datax\",\"datajmpdvrjzwaw\",\"dataewajccsdjuz\"],\"folder\":{\"name\":\"jtickzovguzpr\"},\"\":{\"qlrzhtocjzfp\":\"datahboigzxko\",\"jwgiitvjcmimbmsw\":\"dataexuvatzwn\"}}") - .toObject(AzureDatabricksDeltaLakeDataset.class); - Assertions.assertEquals("aqxaxtuxi", model.description()); - Assertions.assertEquals("itn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tth").type()); - Assertions.assertEquals("jtickzovguzpr", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeDataset model = 
new AzureDatabricksDeltaLakeDataset().withDescription("aqxaxtuxi") - .withStructure("datapbiichlygkv") - .withSchema("dataxwonkrnizdxywab") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("itn") - .withParameters(mapOf("nnewltonopffem", "dataptgvnaqyjukka", "qnxyd", "datawfhhawbabhzbfcdi", - "zsuspaywvslq", "datazfoi", "ytqavouymkdeu", "dataronzeafkxfmuwdb"))) - .withParameters(mapOf("tth", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapfdkaxgbiwpgopql"))) - .withAnnotations(Arrays.asList("datarmt", "datax", "datajmpdvrjzwaw", "dataewajccsdjuz")) - .withFolder(new DatasetFolder().withName("jtickzovguzpr")) - .withTable("datawisp") - .withDatabase("datagdblwj"); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeDataset.class); - Assertions.assertEquals("aqxaxtuxi", model.description()); - Assertions.assertEquals("itn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tth").type()); - Assertions.assertEquals("jtickzovguzpr", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java deleted file mode 100644 index 8d8194f46593..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureDatabricksDeltaLakeDatasetTypeProperties; - -public final class AzureDatabricksDeltaLakeDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeDatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"databbbjoypplod\",\"database\":\"datarbkpozffs\"}") - .toObject(AzureDatabricksDeltaLakeDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeDatasetTypeProperties model - = new AzureDatabricksDeltaLakeDatasetTypeProperties().withTable("databbbjoypplod") - .withDatabase("datarbkpozffs"); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java deleted file mode 100644 index 6df4200e2376..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeExportCommandTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeExportCommand; - -public final class AzureDatabricksDeltaLakeExportCommandTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeExportCommand model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeExportCommand\",\"dateFormat\":\"datayjlyxduxhopyavcb\",\"timestampFormat\":\"dataembvfa\",\"\":{\"xzrycvacspzj\":\"datajoazyxmumfbk\"}}") - .toObject(AzureDatabricksDeltaLakeExportCommand.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeExportCommand model - = new AzureDatabricksDeltaLakeExportCommand().withDateFormat("datayjlyxduxhopyavcb") - .withTimestampFormat("dataembvfa"); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeExportCommand.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java deleted file mode 100644 index 0aa77eea6090..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeImportCommandTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeImportCommand; - -public final class AzureDatabricksDeltaLakeImportCommandTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeImportCommand model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeImportCommand\",\"dateFormat\":\"databii\",\"timestampFormat\":\"databkxiujaagfeiwuux\",\"\":{\"qj\":\"datazmsivqegmpfzbrh\",\"ttsz\":\"datatkrsjspziiev\",\"d\":\"dataauyxyoyjas\"}}") - .toObject(AzureDatabricksDeltaLakeImportCommand.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeImportCommand model - = new AzureDatabricksDeltaLakeImportCommand().withDateFormat("databii") - .withTimestampFormat("databkxiujaagfeiwuux"); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeImportCommand.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java deleted file mode 100644 index 6827898ada31..000000000000 
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSinkTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeImportCommand; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeSink; - -public final class AzureDatabricksDeltaLakeSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeSink model = BinaryData.fromString( - "{\"type\":\"AzureDatabricksDeltaLakeSink\",\"preCopyScript\":\"datayphxeoqma\",\"importSettings\":{\"type\":\"AzureDatabricksDeltaLakeImportCommand\",\"dateFormat\":\"datakce\",\"timestampFormat\":\"datauvosbba\",\"\":{\"ipqrtnkngjnhx\":\"dataqooxvprqlxqhq\",\"cskif\":\"dataf\",\"tped\":\"dataj\",\"qqoz\":\"datahfpfsesiywcrejtp\"}},\"writeBatchSize\":\"datasbpqwnmfjktdvdh\",\"writeBatchTimeout\":\"dataztaluuup\",\"sinkRetryCount\":\"dataaoatzvajwvxh\",\"sinkRetryWait\":\"datamotulhilmazgp\",\"maxConcurrentConnections\":\"datarppsoeo\",\"disableMetricsCollection\":\"dataywtyeh\",\"\":{\"dxsgwd\":\"datagqeplyos\",\"gkv\":\"dataoxjlvvvzpjjvyin\",\"eldnmb\":\"datao\"}}") - .toObject(AzureDatabricksDeltaLakeSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeSink model - = new AzureDatabricksDeltaLakeSink().withWriteBatchSize("datasbpqwnmfjktdvdh") - .withWriteBatchTimeout("dataztaluuup") - .withSinkRetryCount("dataaoatzvajwvxh") - .withSinkRetryWait("datamotulhilmazgp") - .withMaxConcurrentConnections("datarppsoeo") - .withDisableMetricsCollection("dataywtyeh") - 
.withPreCopyScript("datayphxeoqma") - .withImportSettings(new AzureDatabricksDeltaLakeImportCommand().withDateFormat("datakce") - .withTimestampFormat("datauvosbba")); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java deleted file mode 100644 index 56921422b8d5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureDatabricksDeltaLakeSourceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeExportCommand; -import com.azure.resourcemanager.datafactory.models.AzureDatabricksDeltaLakeSource; - -public final class AzureDatabricksDeltaLakeSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureDatabricksDeltaLakeSource model = BinaryData.fromString( - 
"{\"type\":\"AzureDatabricksDeltaLakeSource\",\"query\":\"datanbccffsbz\",\"exportSettings\":{\"type\":\"AzureDatabricksDeltaLakeExportCommand\",\"dateFormat\":\"dataf\",\"timestampFormat\":\"datal\",\"\":{\"chpzv\":\"dataoudjcttav\",\"lferjwhonn\":\"dataz\",\"nicqulfqttfq\":\"dataude\"}},\"sourceRetryCount\":\"dataoowgqooiphhsvsn\",\"sourceRetryWait\":\"datahkjiwfvet\",\"maxConcurrentConnections\":\"datareqvf\",\"disableMetricsCollection\":\"datart\",\"\":{\"wqal\":\"dataikqzd\",\"cisolkwipvls\":\"datapmiytpji\",\"fclehlopipvpe\":\"datajutawgylnod\"}}") - .toObject(AzureDatabricksDeltaLakeSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureDatabricksDeltaLakeSource model - = new AzureDatabricksDeltaLakeSource().withSourceRetryCount("dataoowgqooiphhsvsn") - .withSourceRetryWait("datahkjiwfvet") - .withMaxConcurrentConnections("datareqvf") - .withDisableMetricsCollection("datart") - .withQuery("datanbccffsbz") - .withExportSettings( - new AzureDatabricksDeltaLakeExportCommand().withDateFormat("dataf").withTimestampFormat("datal")); - model = BinaryData.fromObject(model).toObject(AzureDatabricksDeltaLakeSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java deleted file mode 100644 index ffd3f61ce89a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageLocationTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureFileStorageLocation; - -public final class AzureFileStorageLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureFileStorageLocation model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageLocation\",\"folderPath\":\"dataxql\",\"fileName\":\"dataolqownki\",\"\":{\"lmsoodtmve\":\"dataewnahwkxjjmzt\"}}") - .toObject(AzureFileStorageLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureFileStorageLocation model - = new AzureFileStorageLocation().withFolderPath("dataxql").withFileName("dataolqownki"); - model = BinaryData.fromObject(model).toObject(AzureFileStorageLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java deleted file mode 100644 index 7e7dc25fa04e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageReadSettingsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureFileStorageReadSettings; - -public final class AzureFileStorageReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureFileStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageReadSettings\",\"recursive\":\"dataewcrblksddb\",\"wildcardFolderPath\":\"datagvjhoezjikwsuxrs\",\"wildcardFileName\":\"datat\",\"prefix\":\"datauypdcgx\",\"fileListPath\":\"databwvuwbbzcmp\",\"enablePartitionDiscovery\":\"datamsg\",\"partitionRootPath\":\"dataksmrgjfyfmj\",\"deleteFilesAfterCompletion\":\"datactybhsg\",\"modifiedDatetimeStart\":\"dataumkts\",\"modifiedDatetimeEnd\":\"dataeyoncxjevmawxrgw\",\"maxConcurrentConnections\":\"datan\",\"disableMetricsCollection\":\"datahhfhihwwkhi\",\"\":{\"uttyi\":\"dataykxzc\",\"jz\":\"dataps\",\"anzpzmo\":\"datavojwjanzolqq\",\"yowisqfifmit\":\"dataupqlusxkzdnot\"}}") - .toObject(AzureFileStorageReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureFileStorageReadSettings model = new AzureFileStorageReadSettings().withMaxConcurrentConnections("datan") - .withDisableMetricsCollection("datahhfhihwwkhi") - .withRecursive("dataewcrblksddb") - .withWildcardFolderPath("datagvjhoezjikwsuxrs") - .withWildcardFileName("datat") - .withPrefix("datauypdcgx") - .withFileListPath("databwvuwbbzcmp") - .withEnablePartitionDiscovery("datamsg") - .withPartitionRootPath("dataksmrgjfyfmj") - .withDeleteFilesAfterCompletion("datactybhsg") - .withModifiedDatetimeStart("dataumkts") - .withModifiedDatetimeEnd("dataeyoncxjevmawxrgw"); - model = BinaryData.fromObject(model).toObject(AzureFileStorageReadSettings.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java deleted file mode 100644 index 276a4408d5b7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFileStorageWriteSettingsTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureFileStorageWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class AzureFileStorageWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureFileStorageWriteSettings model = BinaryData.fromString( - "{\"type\":\"AzureFileStorageWriteSettings\",\"maxConcurrentConnections\":\"datajuingnfunhtzgxs\",\"disableMetricsCollection\":\"datawmopk\",\"copyBehavior\":\"datahjztbw\",\"metadata\":[{\"name\":\"dataz\",\"value\":\"databhznhqzdbzl\"}],\"\":{\"vg\":\"databekvprkwpvxieqc\",\"aeiuex\":\"datashfafbeh\",\"ecrizkabafdlsiza\":\"datapgrmwdwlraeplpf\"}}") - .toObject(AzureFileStorageWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureFileStorageWriteSettings model - = new AzureFileStorageWriteSettings().withMaxConcurrentConnections("datajuingnfunhtzgxs") - .withDisableMetricsCollection("datawmopk") - .withCopyBehavior("datahjztbw") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataz").withValue("databhznhqzdbzl"))); - 
model = BinaryData.fromObject(model).toObject(AzureFileStorageWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java deleted file mode 100644 index b014e7b9fe51..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTests.java +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AzureFunctionActivity; -import com.azure.resourcemanager.datafactory.models.AzureFunctionActivityMethod; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureFunctionActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureFunctionActivity model = BinaryData.fromString( - 
"{\"type\":\"AzureFunctionActivity\",\"typeProperties\":{\"method\":\"DELETE\",\"functionName\":\"datailbiwacxldho\",\"headers\":{\"shtlqhik\":\"datapwxhccvtbgznpxax\",\"tmxwmjaevwid\":\"datafzdlhpzobhnuzia\"},\"body\":\"datapfku\"},\"linkedServiceName\":{\"referenceName\":\"wdir\",\"parameters\":{\"h\":\"dataraqyavdykxgcfh\"}},\"policy\":{\"timeout\":\"datayhzys\",\"retry\":\"dataqfbycra\",\"retryIntervalInSeconds\":1509141916,\"secureInput\":true,\"secureOutput\":true,\"\":{\"pxhdefyditb\":\"datahjbfoemmjtstlgdv\",\"ihl\":\"datamvavjyqhcowou\",\"gjb\":\"datanyv\"}},\"name\":\"hjpsgprlmpz\",\"description\":\"iakyflryhvph\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"zbpfwlxx\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Completed\"],\"\":{\"gcazyni\":\"datatmlhrzi\",\"hmxmjm\":\"datakkengowcut\",\"kqdqn\":\"dataouichoiimennxvqj\"}},{\"activity\":\"efszuu\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Skipped\",\"Skipped\"],\"\":{\"yxqfkrvm\":\"dataheqllrpc\",\"tlsipedgtupkm\":\"datadqhagehohqe\"}},{\"activity\":\"xeubngwidgxypdo\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Completed\",\"Completed\"],\"\":{\"ybl\":\"datavyhmi\"}}],\"userProperties\":[{\"name\":\"k\",\"value\":\"datatvoprgcsjycorxib\"},{\"name\":\"sfxkudic\",\"value\":\"datafoxenmuevq\"},{\"name\":\"ssclgolbpw\",\"value\":\"datagzdionlgnes\"}],\"\":{\"zskvp\":\"datanhfd\",\"hlbxrqbi\":\"dataqxnd\",\"zkehfkpoczxm\":\"datajhaafvxxi\"}}") - .toObject(AzureFunctionActivity.class); - Assertions.assertEquals("hjpsgprlmpz", model.name()); - Assertions.assertEquals("iakyflryhvph", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("zbpfwlxx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - 
Assertions.assertEquals("k", model.userProperties().get(0).name()); - Assertions.assertEquals("wdir", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1509141916, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(AzureFunctionActivityMethod.DELETE, model.method()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureFunctionActivity model = new AzureFunctionActivity().withName("hjpsgprlmpz") - .withDescription("iakyflryhvph") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("zbpfwlxx") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("efszuu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xeubngwidgxypdo") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("k").withValue("datatvoprgcsjycorxib"), - new UserProperty().withName("sfxkudic").withValue("datafoxenmuevq"), - new UserProperty().withName("ssclgolbpw").withValue("datagzdionlgnes"))) - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("wdir").withParameters(mapOf("h", "dataraqyavdykxgcfh"))) - .withPolicy(new ActivityPolicy().withTimeout("datayhzys") - .withRetry("dataqfbycra") - 
.withRetryIntervalInSeconds(1509141916) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withMethod(AzureFunctionActivityMethod.DELETE) - .withFunctionName("datailbiwacxldho") - .withHeaders(mapOf("shtlqhik", "datapwxhccvtbgznpxax", "tmxwmjaevwid", "datafzdlhpzobhnuzia")) - .withBody("datapfku"); - model = BinaryData.fromObject(model).toObject(AzureFunctionActivity.class); - Assertions.assertEquals("hjpsgprlmpz", model.name()); - Assertions.assertEquals("iakyflryhvph", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("zbpfwlxx", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("k", model.userProperties().get(0).name()); - Assertions.assertEquals("wdir", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1509141916, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(AzureFunctionActivityMethod.DELETE, model.method()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java deleted file mode 100644 index 4067033c3ffc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureFunctionActivityTypePropertiesTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureFunctionActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.AzureFunctionActivityMethod; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureFunctionActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureFunctionActivityTypeProperties model = BinaryData.fromString( - "{\"method\":\"HEAD\",\"functionName\":\"datakrwihbyufmuinhq\",\"headers\":{\"huxzdgoto\":\"dataowxd\"},\"body\":\"datasduirjqxkna\"}") - .toObject(AzureFunctionActivityTypeProperties.class); - Assertions.assertEquals(AzureFunctionActivityMethod.HEAD, model.method()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureFunctionActivityTypeProperties model - = new 
AzureFunctionActivityTypeProperties().withMethod(AzureFunctionActivityMethod.HEAD) - .withFunctionName("datakrwihbyufmuinhq") - .withHeaders(mapOf("huxzdgoto", "dataowxd")) - .withBody("datasduirjqxkna"); - model = BinaryData.fromObject(model).toObject(AzureFunctionActivityTypeProperties.class); - Assertions.assertEquals(AzureFunctionActivityMethod.HEAD, model.method()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java deleted file mode 100644 index a258bc9bf081..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTests.java +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AzureMLBatchExecutionActivity; -import com.azure.resourcemanager.datafactory.models.AzureMLWebServiceFile; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLBatchExecutionActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLBatchExecutionActivity model = BinaryData.fromString( - 
"{\"type\":\"AzureMLBatchExecution\",\"typeProperties\":{\"globalParameters\":{\"xyvxdbuzdphog\":\"datamrcxugatv\",\"rasdrrfozzv\":\"datarcmgue\"},\"webServiceOutputs\":{\"uvxx\":{\"filePath\":\"datalzjnjkbmfcry\",\"linkedServiceName\":{\"referenceName\":\"vcabrkrjmzqn\",\"parameters\":{\"pvdxutcoqclypb\":\"dataloozah\",\"jxitppe\":\"datanjorpcrg\"}}},\"rjtyhth\":{\"filePath\":\"dataoqrdefhb\",\"linkedServiceName\":{\"referenceName\":\"ict\",\"parameters\":{\"vmwjuqchcooty\":\"datagbgenwesxzuklz\",\"hkvkwdtbv\":\"datacarjmhiewvcpys\",\"tprxtfwvngwcsn\":\"dataclgkzby\",\"jlgwzbrggntqp\":\"dataakglygeuoolywjvd\"}}}},\"webServiceInputs\":{\"sqcwjxatghuixc\":{\"filePath\":\"datazdnhvkhkubpo\",\"linkedServiceName\":{\"referenceName\":\"hdxchaogawtvrnw\",\"parameters\":{\"rtquxltekix\":\"datafhiqliulfxgz\",\"ozsodp\":\"datauhca\"}}},\"zyr\":{\"filePath\":\"dataycifdr\",\"linkedServiceName\":{\"referenceName\":\"rywribmeuukkonw\",\"parameters\":{\"bwmiap\":\"datamhpjmnxlfkmdwzgb\",\"p\":\"datamrpbmxmxshfh\",\"oqnytuc\":\"dataqimjnxpfvxyt\"}}},\"anlyccdmkp\":{\"filePath\":\"datagdebsinsoybegej\",\"linkedServiceName\":{\"referenceName\":\"pouhlhludimq\",\"parameters\":{\"j\":\"datagxrozcfcxks\",\"gepmnxva\":\"databteakdrh\",\"cnlphlkx\":\"dataq\"}}},\"mdlynlhsdtc\":{\"filePath\":\"datahgatqzded\",\"linkedServiceName\":{\"referenceName\":\"zd\",\"parameters\":{\"ggfzpst\":\"datadn\",\"mxnmx\":\"datamdmwsflrdyrxloxa\"}}}}},\"linkedServiceName\":{\"referenceName\":\"levndl\",\"parameters\":{\"wf\":\"datarfcfl\",\"izloyqjrkted\":\"datajyuhuthqdfi\",\"jsogesrmah\":\"datauqve\"}},\"policy\":{\"timeout\":\"datacfbp\",\"retry\":\"dataelbzwxxsowd\",\"retryIntervalInSeconds\":11883213,\"secureInput\":true,\"secureOutput\":false,\"\":{\"eisboeapsr\":\"datahqf\",\"lpuokmakkw\":\"datay\"}},\"name\":\"rkaymdgzbklioku\",\"description\":\"rpamavxorldubbba\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"laooldwdjermdz\",\"dependencyConditions\":[\"Skip
ped\",\"Failed\"],\"\":{\"xkw\":\"dataeqgkutkcczbuob\",\"vsvywnzliqvqbv\":\"datahk\",\"qukegkludfdh\":\"dataihnas\",\"tsa\":\"dataorihqzfjyqadtq\"}}],\"userProperties\":[{\"name\":\"favplywtgilhxa\",\"value\":\"datafnuufe\"},{\"name\":\"pztoktnfe\",\"value\":\"datahc\"},{\"name\":\"xigexqyroq\",\"value\":\"datalgvyceuywuioim\"}],\"\":{\"if\":\"datayznlha\",\"zqjqbwjiqru\":\"datakgxfmdpsreqorpku\",\"doslvfdvbslrhcce\":\"databjuakdsmwajalsen\"}}") - .toObject(AzureMLBatchExecutionActivity.class); - Assertions.assertEquals("rkaymdgzbklioku", model.name()); - Assertions.assertEquals("rpamavxorldubbba", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("laooldwdjermdz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("favplywtgilhxa", model.userProperties().get(0).name()); - Assertions.assertEquals("levndl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(11883213, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("vcabrkrjmzqn", - model.webServiceOutputs().get("uvxx").linkedServiceName().referenceName()); - Assertions.assertEquals("hdxchaogawtvrnw", - model.webServiceInputs().get("sqcwjxatghuixc").linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLBatchExecutionActivity model = new AzureMLBatchExecutionActivity().withName("rkaymdgzbklioku") - .withDescription("rpamavxorldubbba") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new 
ActivityDependency().withActivity("laooldwdjermdz") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("favplywtgilhxa").withValue("datafnuufe"), - new UserProperty().withName("pztoktnfe").withValue("datahc"), - new UserProperty().withName("xigexqyroq").withValue("datalgvyceuywuioim"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("levndl") - .withParameters(mapOf("wf", "datarfcfl", "izloyqjrkted", "datajyuhuthqdfi", "jsogesrmah", "datauqve"))) - .withPolicy(new ActivityPolicy().withTimeout("datacfbp") - .withRetry("dataelbzwxxsowd") - .withRetryIntervalInSeconds(11883213) - .withSecureInput(true) - .withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withGlobalParameters(mapOf("xyvxdbuzdphog", "datamrcxugatv", "rasdrrfozzv", "datarcmgue")) - .withWebServiceOutputs(mapOf("uvxx", - new AzureMLWebServiceFile().withFilePath("datalzjnjkbmfcry") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vcabrkrjmzqn") - .withParameters(mapOf("pvdxutcoqclypb", "dataloozah", "jxitppe", "datanjorpcrg"))), - "rjtyhth", - new AzureMLWebServiceFile().withFilePath("dataoqrdefhb") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ict") - .withParameters(mapOf("vmwjuqchcooty", "datagbgenwesxzuklz", "hkvkwdtbv", "datacarjmhiewvcpys", - "tprxtfwvngwcsn", "dataclgkzby", "jlgwzbrggntqp", "dataakglygeuoolywjvd"))))) - .withWebServiceInputs(mapOf("sqcwjxatghuixc", - new AzureMLWebServiceFile().withFilePath("datazdnhvkhkubpo") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hdxchaogawtvrnw") - .withParameters(mapOf("rtquxltekix", "datafhiqliulfxgz", "ozsodp", "datauhca"))), - "zyr", - new AzureMLWebServiceFile().withFilePath("dataycifdr") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rywribmeuukkonw") - 
.withParameters(mapOf("bwmiap", "datamhpjmnxlfkmdwzgb", "p", "datamrpbmxmxshfh", "oqnytuc", - "dataqimjnxpfvxyt"))), - "anlyccdmkp", - new AzureMLWebServiceFile().withFilePath("datagdebsinsoybegej") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pouhlhludimq") - .withParameters( - mapOf("j", "datagxrozcfcxks", "gepmnxva", "databteakdrh", "cnlphlkx", "dataq"))), - "mdlynlhsdtc", - new AzureMLWebServiceFile().withFilePath("datahgatqzded") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zd") - .withParameters(mapOf("ggfzpst", "datadn", "mxnmx", "datamdmwsflrdyrxloxa"))))); - model = BinaryData.fromObject(model).toObject(AzureMLBatchExecutionActivity.class); - Assertions.assertEquals("rkaymdgzbklioku", model.name()); - Assertions.assertEquals("rpamavxorldubbba", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("laooldwdjermdz", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("favplywtgilhxa", model.userProperties().get(0).name()); - Assertions.assertEquals("levndl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(11883213, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("vcabrkrjmzqn", - model.webServiceOutputs().get("uvxx").linkedServiceName().referenceName()); - Assertions.assertEquals("hdxchaogawtvrnw", - model.webServiceInputs().get("sqcwjxatghuixc").linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java deleted file mode 100644 index d5d9b971e398..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLBatchExecutionActivityTypePropertiesTests.java +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureMLBatchExecutionActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.AzureMLWebServiceFile; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLBatchExecutionActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLBatchExecutionActivityTypeProperties model = BinaryData.fromString( - 
"{\"globalParameters\":{\"zwbb\":\"datajsju\",\"vfukuhtd\":\"datavx\",\"jc\":\"dataxidmit\"},\"webServiceOutputs\":{\"iadygoad\":{\"filePath\":\"datav\",\"linkedServiceName\":{\"referenceName\":\"ftmpjinrq\",\"parameters\":{\"sscngduew\":\"dataqxahp\",\"qires\":\"datavhcwt\",\"equocawcb\":\"dataxigpmc\",\"btxzaaav\":\"datanyljycpwh\"}}},\"uscdnneofta\":{\"filePath\":\"datadxdxrkrvmhhgvrxv\",\"linkedServiceName\":{\"referenceName\":\"uwbvrbwafw\",\"parameters\":{\"gwfgvpftbwmuxcpy\":\"datatnc\",\"mghhzm\":\"databvf\",\"pywvgfdsrng\":\"dataptkbe\",\"ksnnykss\":\"dataqvxzqwcmmolpfcv\"}}}},\"webServiceInputs\":{\"mgvqthlimvyzrdq\":{\"filePath\":\"datahttj\",\"linkedServiceName\":{\"referenceName\":\"qwwlaxhsjwpcjtw\",\"parameters\":{\"avxi\":\"datarzntmzz\",\"ukytteai\":\"datakexspoiq\",\"igdvcbyldsmy\":\"dataywopkovlwm\"}}},\"acjoaixhmaokkgvw\":{\"filePath\":\"datagyon\",\"linkedServiceName\":{\"referenceName\":\"rpxwldktphnis\",\"parameters\":{\"fitpxpkba\":\"datajnbt\",\"bbyqvbd\":\"datagj\",\"ghalmscoggam\":\"datafzkujgeppxiyo\"}}},\"p\":{\"filePath\":\"datalqqnfdwrwscyblw\",\"linkedServiceName\":{\"referenceName\":\"hpibgalefjsgn\",\"parameters\":{\"nshnb\":\"datamvzcibqyp\",\"s\":\"datadw\"}}}}}") - .toObject(AzureMLBatchExecutionActivityTypeProperties.class); - Assertions.assertEquals("ftmpjinrq", - model.webServiceOutputs().get("iadygoad").linkedServiceName().referenceName()); - Assertions.assertEquals("qwwlaxhsjwpcjtw", - model.webServiceInputs().get("mgvqthlimvyzrdq").linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLBatchExecutionActivityTypeProperties model - = new AzureMLBatchExecutionActivityTypeProperties() - .withGlobalParameters(mapOf("zwbb", "datajsju", "vfukuhtd", "datavx", "jc", "dataxidmit")) - .withWebServiceOutputs(mapOf("iadygoad", - new AzureMLWebServiceFile().withFilePath("datav") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("ftmpjinrq") - .withParameters(mapOf("sscngduew", "dataqxahp", "qires", "datavhcwt", "equocawcb", - "dataxigpmc", "btxzaaav", "datanyljycpwh"))), - "uscdnneofta", - new AzureMLWebServiceFile().withFilePath("datadxdxrkrvmhhgvrxv") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uwbvrbwafw") - .withParameters(mapOf("gwfgvpftbwmuxcpy", "datatnc", "mghhzm", "databvf", "pywvgfdsrng", - "dataptkbe", "ksnnykss", "dataqvxzqwcmmolpfcv"))))) - .withWebServiceInputs(mapOf("mgvqthlimvyzrdq", - new AzureMLWebServiceFile().withFilePath("datahttj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qwwlaxhsjwpcjtw") - .withParameters(mapOf("avxi", "datarzntmzz", "ukytteai", "datakexspoiq", "igdvcbyldsmy", - "dataywopkovlwm"))), - "acjoaixhmaokkgvw", - new AzureMLWebServiceFile().withFilePath("datagyon") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rpxwldktphnis") - .withParameters(mapOf("fitpxpkba", "datajnbt", "bbyqvbd", "datagj", "ghalmscoggam", - "datafzkujgeppxiyo"))), - "p", - new AzureMLWebServiceFile().withFilePath("datalqqnfdwrwscyblw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hpibgalefjsgn") - .withParameters(mapOf("nshnb", "datamvzcibqyp", "s", "datadw"))))); - model = BinaryData.fromObject(model).toObject(AzureMLBatchExecutionActivityTypeProperties.class); - Assertions.assertEquals("ftmpjinrq", - model.webServiceOutputs().get("iadygoad").linkedServiceName().referenceName()); - Assertions.assertEquals("qwwlaxhsjwpcjtw", - model.webServiceInputs().get("mgvqthlimvyzrdq").linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java deleted file mode 100644 index 0a9109487b5a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTests.java +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AzureMLExecutePipelineActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLExecutePipelineActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLExecutePipelineActivity model = BinaryData.fromString( - 
"{\"type\":\"AzureMLExecutePipeline\",\"typeProperties\":{\"mlPipelineId\":\"datajghfaldx\",\"mlPipelineEndpointId\":\"datarlbbpkjseftv\",\"version\":\"datahfmaknonaqyes\",\"experimentName\":\"datanoecwabuf\",\"mlPipelineParameters\":\"datal\",\"dataPathAssignments\":\"datakbb\",\"mlParentRunId\":\"dataayvkmptgpqx\",\"continueOnStepFailure\":\"datallamdzozj\"},\"linkedServiceName\":{\"referenceName\":\"mkdboesxpcbg\",\"parameters\":{\"losxtbdgpy\":\"dataahbcygg\"}},\"policy\":{\"timeout\":\"databd\",\"retry\":\"datalimacz\",\"retryIntervalInSeconds\":1188904597,\"secureInput\":false,\"secureOutput\":false,\"\":{\"yarc\":\"datah\",\"btwp\":\"datampxdyyoh\",\"bvxum\":\"datagcvuemjcjejal\"}},\"name\":\"xuvdoteidcw\",\"description\":\"dqqgtwegqmlviy\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"uzmzgat\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"mcukzwz\":\"datasaerzc\",\"zzrxgqxddvuiur\":\"datai\",\"jyjcshmtpdvuix\":\"datawbvyraazscxi\",\"vckfivia\":\"datakmybohax\"}},{\"activity\":\"qnnmcdqzgepjyppk\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Completed\",\"Failed\"],\"\":{\"yu\":\"datayfsv\",\"jnakqcsgo\":\"dataqcuz\",\"ieitp\":\"datazyxu\"}},{\"activity\":\"kjyjhkrk\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\",\"Skipped\"],\"\":{\"jjiuirmcupbehq\":\"dataqpjnqyylkcbk\"}},{\"activity\":\"mhqihlxdh\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"bwhawref\":\"datavctmpxnbnho\"}}],\"userProperties\":[{\"name\":\"ttzlo\",\"value\":\"datat\"}],\"\":{\"bcwfp\":\"databxnqkbvhdbg\",\"tcucfbr\":\"datavmixfqqm\"}}") - .toObject(AzureMLExecutePipelineActivity.class); - Assertions.assertEquals("xuvdoteidcw", model.name()); - Assertions.assertEquals("dqqgtwegqmlviy", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, 
model.onInactiveMarkAs()); - Assertions.assertEquals("uzmzgat", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ttzlo", model.userProperties().get(0).name()); - Assertions.assertEquals("mkdboesxpcbg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1188904597, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLExecutePipelineActivity model = new AzureMLExecutePipelineActivity().withName("xuvdoteidcw") - .withDescription("dqqgtwegqmlviy") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("uzmzgat") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qnnmcdqzgepjyppk") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kjyjhkrk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mhqihlxdh") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new 
UserProperty().withName("ttzlo").withValue("datat"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mkdboesxpcbg") - .withParameters(mapOf("losxtbdgpy", "dataahbcygg"))) - .withPolicy(new ActivityPolicy().withTimeout("databd") - .withRetry("datalimacz") - .withRetryIntervalInSeconds(1188904597) - .withSecureInput(false) - .withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withMlPipelineId("datajghfaldx") - .withMlPipelineEndpointId("datarlbbpkjseftv") - .withVersion("datahfmaknonaqyes") - .withExperimentName("datanoecwabuf") - .withMlPipelineParameters("datal") - .withDataPathAssignments("datakbb") - .withMlParentRunId("dataayvkmptgpqx") - .withContinueOnStepFailure("datallamdzozj"); - model = BinaryData.fromObject(model).toObject(AzureMLExecutePipelineActivity.class); - Assertions.assertEquals("xuvdoteidcw", model.name()); - Assertions.assertEquals("dqqgtwegqmlviy", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("uzmzgat", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ttzlo", model.userProperties().get(0).name()); - Assertions.assertEquals("mkdboesxpcbg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1188904597, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java deleted file mode 100644 index 2f62f20b8fbb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLExecutePipelineActivityTypePropertiesTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureMLExecutePipelineActivityTypeProperties; - -public final class AzureMLExecutePipelineActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLExecutePipelineActivityTypeProperties model = BinaryData.fromString( - "{\"mlPipelineId\":\"datapdtzugwurvpcwy\",\"mlPipelineEndpointId\":\"dataqikouravdqe\",\"version\":\"datawgpmademlo\",\"experimentName\":\"dataayk\",\"mlPipelineParameters\":\"datakk\",\"dataPathAssignments\":\"datafhgdvg\",\"mlParentRunId\":\"dataun\",\"continueOnStepFailure\":\"datatz\"}") - .toObject(AzureMLExecutePipelineActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLExecutePipelineActivityTypeProperties model - = new 
AzureMLExecutePipelineActivityTypeProperties().withMlPipelineId("datapdtzugwurvpcwy") - .withMlPipelineEndpointId("dataqikouravdqe") - .withVersion("datawgpmademlo") - .withExperimentName("dataayk") - .withMlPipelineParameters("datakk") - .withDataPathAssignments("datafhgdvg") - .withMlParentRunId("dataun") - .withContinueOnStepFailure("datatz"); - model = BinaryData.fromObject(model).toObject(AzureMLExecutePipelineActivityTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java deleted file mode 100644 index 9e2450278576..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTests.java +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.AzureMLUpdateResourceActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLUpdateResourceActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLUpdateResourceActivity model = BinaryData.fromString( - 
"{\"type\":\"AzureMLUpdateResource\",\"typeProperties\":{\"trainedModelName\":\"datahjorguifchvrgbmn\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"kqejrh\",\"parameters\":{\"cfxbywpw\":\"datazzdlfayich\"}},\"trainedModelFilePath\":\"datavpglstxznkbj\"},\"linkedServiceName\":{\"referenceName\":\"e\",\"parameters\":{\"ocwbcxwdbx\":\"datarddygpdnnvep\"}},\"policy\":{\"timeout\":\"datapummphb\",\"retry\":\"datariv\",\"retryIntervalInSeconds\":100885789,\"secureInput\":true,\"secureOutput\":true,\"\":{\"xzm\":\"dataenaj\",\"ljl\":\"datapnersmevhgsuq\",\"zsyqpkpvb\":\"datarjqakb\",\"gyguqyxvzyi\":\"datag\"}},\"name\":\"gzeio\",\"description\":\"cngiaadg\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"teidfzof\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"ucsgquphqnuitumx\":\"datacmlomlnprkiky\"}},{\"activity\":\"vemogab\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"jdmdb\":\"datajfkcmzanru\",\"t\":\"datab\",\"jlaxeqehg\":\"dataqiuohi\",\"gb\":\"datajgvrawjom\"}},{\"activity\":\"dwfyagvhe\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Completed\"],\"\":{\"z\":\"datawpcupejz\"}}],\"userProperties\":[{\"name\":\"pxxgvcsvtflcjxmt\",\"value\":\"dataexapfypdfierut\"},{\"name\":\"edeygsrrgdimaqy\",\"value\":\"datasahv\"},{\"name\":\"wlibrwomdwzz\",\"value\":\"datacyrkcdo\"}],\"\":{\"aitihncysa\":\"datag\",\"ora\":\"datajlq\",\"ajlptydvebipkeo\":\"datatbiskkceb\"}}") - .toObject(AzureMLUpdateResourceActivity.class); - Assertions.assertEquals("gzeio", model.name()); - Assertions.assertEquals("cngiaadg", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("teidfzof", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - 
Assertions.assertEquals("pxxgvcsvtflcjxmt", model.userProperties().get(0).name()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(100885789, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("kqejrh", model.trainedModelLinkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLUpdateResourceActivity model = new AzureMLUpdateResourceActivity().withName("gzeio") - .withDescription("cngiaadg") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("teidfzof") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vemogab") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dwfyagvhe") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("pxxgvcsvtflcjxmt").withValue("dataexapfypdfierut"), - new UserProperty().withName("edeygsrrgdimaqy").withValue("datasahv"), - new UserProperty().withName("wlibrwomdwzz").withValue("datacyrkcdo"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("ocwbcxwdbx", "datarddygpdnnvep"))) - .withPolicy(new ActivityPolicy().withTimeout("datapummphb") - .withRetry("datariv") - .withRetryIntervalInSeconds(100885789) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - 
.withTrainedModelName("datahjorguifchvrgbmn") - .withTrainedModelLinkedServiceName(new LinkedServiceReference().withReferenceName("kqejrh") - .withParameters(mapOf("cfxbywpw", "datazzdlfayich"))) - .withTrainedModelFilePath("datavpglstxznkbj"); - model = BinaryData.fromObject(model).toObject(AzureMLUpdateResourceActivity.class); - Assertions.assertEquals("gzeio", model.name()); - Assertions.assertEquals("cngiaadg", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("teidfzof", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("pxxgvcsvtflcjxmt", model.userProperties().get(0).name()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(100885789, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("kqejrh", model.trainedModelLinkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java deleted file mode 100644 index 0c3dc93b81c5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLUpdateResourceActivityTypePropertiesTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureMLUpdateResourceActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLUpdateResourceActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLUpdateResourceActivityTypeProperties model = BinaryData.fromString( - "{\"trainedModelName\":\"dataixxiukghxde\",\"trainedModelLinkedServiceName\":{\"referenceName\":\"qptvxibpzhkn\",\"parameters\":{\"ss\":\"datavzqawjnw\"}},\"trainedModelFilePath\":\"dataubpfe\"}") - .toObject(AzureMLUpdateResourceActivityTypeProperties.class); - Assertions.assertEquals("qptvxibpzhkn", model.trainedModelLinkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - AzureMLUpdateResourceActivityTypeProperties model - = new AzureMLUpdateResourceActivityTypeProperties().withTrainedModelName("dataixxiukghxde") - .withTrainedModelLinkedServiceName(new LinkedServiceReference().withReferenceName("qptvxibpzhkn") - .withParameters(mapOf("ss", "datavzqawjnw"))) - .withTrainedModelFilePath("dataubpfe"); - model = BinaryData.fromObject(model).toObject(AzureMLUpdateResourceActivityTypeProperties.class); - Assertions.assertEquals("qptvxibpzhkn", model.trainedModelLinkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java deleted file mode 100644 index 28e23c1d196f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMLWebServiceFileTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMLWebServiceFile; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMLWebServiceFileTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMLWebServiceFile model = BinaryData.fromString( - "{\"filePath\":\"dataskprgztzcib\",\"linkedServiceName\":{\"referenceName\":\"wqmfhg\",\"parameters\":{\"ph\":\"datasxoeb\"}}}") - .toObject(AzureMLWebServiceFile.class); - Assertions.assertEquals("wqmfhg", model.linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMLWebServiceFile model = new AzureMLWebServiceFile().withFilePath("dataskprgztzcib") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("wqmfhg").withParameters(mapOf("ph", "datasxoeb"))); - model = BinaryData.fromObject(model).toObject(AzureMLWebServiceFile.class); - Assertions.assertEquals("wqmfhg", model.linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java deleted file mode 100644 index 977c5b3ec72c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMariaDBSource; - -public final class AzureMariaDBSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMariaDBSource model = BinaryData.fromString( - "{\"type\":\"AzureMariaDBSource\",\"query\":\"datadkyscxzsynbdrqi\",\"queryTimeout\":\"dataihg\",\"additionalColumns\":\"datahyebwg\",\"sourceRetryCount\":\"dataovsvjxnsor\",\"sourceRetryWait\":\"datal\",\"maxConcurrentConnections\":\"datahlyhgiisnfaxtob\",\"disableMetricsCollection\":\"datafpyilojwc\",\"\":{\"mtyp\":\"dataywtauskish\"}}") - .toObject(AzureMariaDBSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMariaDBSource model = new AzureMariaDBSource().withSourceRetryCount("dataovsvjxnsor") - .withSourceRetryWait("datal") - .withMaxConcurrentConnections("datahlyhgiisnfaxtob") - .withDisableMetricsCollection("datafpyilojwc") - .withQueryTimeout("dataihg") - 
.withAdditionalColumns("datahyebwg") - .withQuery("datadkyscxzsynbdrqi"); - model = BinaryData.fromObject(model).toObject(AzureMariaDBSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java deleted file mode 100644 index a31b5ebc5124..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMariaDBTableDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMariaDBTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMariaDBTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMariaDBTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureMariaDBTable\",\"typeProperties\":{\"tableName\":\"datajrevmptedeuen\"},\"description\":\"shnfiygpgpkkhp\",\"structure\":\"dataglaqlmi\",\"schema\":\"datamtrdlpxiww\",\"linkedServiceName\":{\"referenceName\":\"ecpvfpnrzikvo\",\"parameters\":{\"bwbl\":\"dataeohyfivxdi\",\"qrxrosuqrrldxfu\":\"datajhpxukxgoyxontbw\",\"ewxatktwjrppi\":\"datae\",\"rmd\":\"dataeyrqve\"}},\"parameters\":{\"zwyncwksm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataksbojklwj\"},\"bfnflytf\":{\"type\":\"Bool\",\"defaultValue\":\"datayzo\"},\"kqwopws\":{\"type\":\"Object\",\"defaultValue\":\"datauiqoomis\"},\"dglkfv\":{\"type\":\"Float\",\"defaultValue\":\"datayznghuqzg\"}},\"annotations\":[\"datagavtfy\",\"datasedfmzu\",\"dataryxpi\"],\"folder\":{\"name\":\"peakf\"},\"\":{\"oqkanqtrkic\":\"dataedliklxkyod\",\"dkrwwmurhvifqe\":\"datahqyrgqm\",\"lpzjeldaqwjun\":\"datafsrnackitlwgeb\",\"wcbrds\":\"datalnij\"}}") - .toObject(AzureMariaDBTableDataset.class); - Assertions.assertEquals("shnfiygpgpkkhp", model.description()); - Assertions.assertEquals("ecpvfpnrzikvo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("zwyncwksm").type()); - Assertions.assertEquals("peakf", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMariaDBTableDataset model = new AzureMariaDBTableDataset().withDescription("shnfiygpgpkkhp") - .withStructure("dataglaqlmi") - .withSchema("datamtrdlpxiww") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ecpvfpnrzikvo") - .withParameters(mapOf("bwbl", "dataeohyfivxdi", "qrxrosuqrrldxfu", "datajhpxukxgoyxontbw", - "ewxatktwjrppi", "datae", "rmd", "dataeyrqve"))) - .withParameters(mapOf("zwyncwksm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataksbojklwj"), - "bfnflytf", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datayzo"), 
- "kqwopws", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datauiqoomis"), - "dglkfv", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayznghuqzg"))) - .withAnnotations(Arrays.asList("datagavtfy", "datasedfmzu", "dataryxpi")) - .withFolder(new DatasetFolder().withName("peakf")) - .withTableName("datajrevmptedeuen"); - model = BinaryData.fromObject(model).toObject(AzureMariaDBTableDataset.class); - Assertions.assertEquals("shnfiygpgpkkhp", model.description()); - Assertions.assertEquals("ecpvfpnrzikvo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("zwyncwksm").type()); - Assertions.assertEquals("peakf", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java deleted file mode 100644 index f4280a73826a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMySqlSink; - -public final class AzureMySqlSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMySqlSink model = BinaryData.fromString( - "{\"type\":\"AzureMySqlSink\",\"preCopyScript\":\"datagvf\",\"writeBatchSize\":\"dataqz\",\"writeBatchTimeout\":\"datavazkqkycgejgw\",\"sinkRetryCount\":\"datajybboq\",\"sinkRetryWait\":\"dataiycdvdgemymyddz\",\"maxConcurrentConnections\":\"dataxlvgslmgl\",\"disableMetricsCollection\":\"dataeyvag\",\"\":{\"grhrdnsgvsrtqlt\":\"dataqpaexlltmekkae\",\"wiwrubxeyralhb\":\"datawjkraleglpynsbl\",\"qt\":\"datawaltvkylwjo\"}}") - .toObject(AzureMySqlSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMySqlSink model = new AzureMySqlSink().withWriteBatchSize("dataqz") - .withWriteBatchTimeout("datavazkqkycgejgw") - .withSinkRetryCount("datajybboq") - .withSinkRetryWait("dataiycdvdgemymyddz") - .withMaxConcurrentConnections("dataxlvgslmgl") - .withDisableMetricsCollection("dataeyvag") - .withPreCopyScript("datagvf"); - model = BinaryData.fromObject(model).toObject(AzureMySqlSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java deleted file mode 100644 index b89d3bb5e065..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMySqlSource; - -public final class AzureMySqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMySqlSource model = BinaryData.fromString( - "{\"type\":\"AzureMySqlSource\",\"query\":\"dataagv\",\"queryTimeout\":\"dataixzcdaukhtwhhsc\",\"additionalColumns\":\"datamfoojkerduj\",\"sourceRetryCount\":\"databzam\",\"sourceRetryWait\":\"dataadutogbkdctsgval\",\"maxConcurrentConnections\":\"datacnecl\",\"disableMetricsCollection\":\"datamjsqcub\",\"\":{\"iteenaheecsft\":\"datansl\",\"jh\":\"datasubzfuhjnmdcyrbz\"}}") - .toObject(AzureMySqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMySqlSource model = new AzureMySqlSource().withSourceRetryCount("databzam") - .withSourceRetryWait("dataadutogbkdctsgval") - .withMaxConcurrentConnections("datacnecl") - .withDisableMetricsCollection("datamjsqcub") - .withQueryTimeout("dataixzcdaukhtwhhsc") - .withAdditionalColumns("datamfoojkerduj") - .withQuery("dataagv"); - model = BinaryData.fromObject(model).toObject(AzureMySqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java deleted file mode 100644 index 0449da04f854..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTests.java +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureMySqlTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureMySqlTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMySqlTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureMySqlTable\",\"typeProperties\":{\"tableName\":\"dataphkvyyz\",\"table\":\"datacrxylaypdt\"},\"description\":\"velffohuriw\",\"structure\":\"datadfrwpsshrm\",\"schema\":\"datacclpct\",\"linkedServiceName\":{\"referenceName\":\"ogkscxj\",\"parameters\":{\"nxs\":\"datamospoe\"}},\"parameters\":{\"la\":{\"type\":\"Object\",\"defaultValue\":\"datacuyf\"},\"xqpwnikxkcajgrbr\":{\"type\":\"SecureString\",\"defaultValue\":\"dataqhsujkafuzp\"},\"qsazmzlpcxi\":{\"type\":\"Object\",\"defaultValue\":\"datavgoo\"},\"ic\":{\"type\":\"Array\",\"defaultValue\":\"dataxxr\"}},\"annotations\":[\"datavwjdthkvpy\"],\"folder\":{\"name\":\"afinmp\"},\"\":{\"hqdcclcvqsr\":\"datapdkjrzfwkyluobdx\",\"ylcvwbzmfx\":\"datap\",\"fjxlpiy\":\"datary\"}}") - .toObject(AzureMySqlTableDataset.class); - Assertions.assertEquals("velffohuriw", model.description()); - Assertions.assertEquals("ogkscxj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("la").type()); - Assertions.assertEquals("afinmp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMySqlTableDataset model = new 
AzureMySqlTableDataset().withDescription("velffohuriw") - .withStructure("datadfrwpsshrm") - .withSchema("datacclpct") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("ogkscxj").withParameters(mapOf("nxs", "datamospoe"))) - .withParameters( - mapOf("la", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datacuyf"), - "xqpwnikxkcajgrbr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataqhsujkafuzp"), - "qsazmzlpcxi", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavgoo"), "ic", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataxxr"))) - .withAnnotations(Arrays.asList("datavwjdthkvpy")) - .withFolder(new DatasetFolder().withName("afinmp")) - .withTableName("dataphkvyyz") - .withTable("datacrxylaypdt"); - model = BinaryData.fromObject(model).toObject(AzureMySqlTableDataset.class); - Assertions.assertEquals("velffohuriw", model.description()); - Assertions.assertEquals("ogkscxj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("la").type()); - Assertions.assertEquals("afinmp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java deleted file mode 100644 index 028c6ecde3df..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureMySqlTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureMySqlTableDatasetTypeProperties; - -public final class AzureMySqlTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureMySqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datanpfydrfb\",\"table\":\"datanyxbyxmk\"}") - .toObject(AzureMySqlTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureMySqlTableDatasetTypeProperties model - = new AzureMySqlTableDatasetTypeProperties().withTableName("datanpfydrfb").withTable("datanyxbyxmk"); - model = BinaryData.fromObject(model).toObject(AzureMySqlTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java deleted file mode 100644 index 0c5e1156c3e9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzurePostgreSqlSink; - -public final class AzurePostgreSqlSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzurePostgreSqlSink model = BinaryData.fromString( - "{\"type\":\"AzurePostgreSqlSink\",\"preCopyScript\":\"dataromeawthycbigpi\",\"writeBatchSize\":\"datapxhzjnparsulmuwl\",\"writeBatchTimeout\":\"dataakheox\",\"sinkRetryCount\":\"datagoavzycxpzat\",\"sinkRetryWait\":\"dataloo\",\"maxConcurrentConnections\":\"datatm\",\"disableMetricsCollection\":\"datatwzslrprftq\",\"\":{\"emhohxabmxoow\":\"dataouyqzhoi\",\"p\":\"dataoogozerccz\",\"uj\":\"datankgk\",\"j\":\"dataqqjqafjk\"}}") - .toObject(AzurePostgreSqlSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzurePostgreSqlSink model = new AzurePostgreSqlSink().withWriteBatchSize("datapxhzjnparsulmuwl") - .withWriteBatchTimeout("dataakheox") - .withSinkRetryCount("datagoavzycxpzat") - .withSinkRetryWait("dataloo") - .withMaxConcurrentConnections("datatm") - .withDisableMetricsCollection("datatwzslrprftq") - .withPreCopyScript("dataromeawthycbigpi"); - model = BinaryData.fromObject(model).toObject(AzurePostgreSqlSink.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java deleted file mode 100644 index 2df145e39c06..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzurePostgreSqlSource; - -public final class AzurePostgreSqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzurePostgreSqlSource model = BinaryData.fromString( - "{\"type\":\"AzurePostgreSqlSource\",\"query\":\"datamenqcjjfxqtvsfs\",\"queryTimeout\":\"datayzgaweixnoblazw\",\"additionalColumns\":\"dataaczi\",\"sourceRetryCount\":\"dataduwlsovitpcsma\",\"sourceRetryWait\":\"datadxubhmiuxypvuaz\",\"maxConcurrentConnections\":\"datatbnekhjz\",\"disableMetricsCollection\":\"datab\",\"\":{\"pp\":\"dataqkuozarr\",\"bkvbcvoyqnr\":\"datazry\",\"woghbpzxkjqecj\":\"datadrctarvz\"}}") - .toObject(AzurePostgreSqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzurePostgreSqlSource model = new AzurePostgreSqlSource().withSourceRetryCount("dataduwlsovitpcsma") - .withSourceRetryWait("datadxubhmiuxypvuaz") - .withMaxConcurrentConnections("datatbnekhjz") - .withDisableMetricsCollection("datab") - .withQueryTimeout("datayzgaweixnoblazw") - .withAdditionalColumns("dataaczi") - .withQuery("datamenqcjjfxqtvsfs"); - model = 
BinaryData.fromObject(model).toObject(AzurePostgreSqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java deleted file mode 100644 index 7f71513be14e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzurePostgreSqlTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzurePostgreSqlTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzurePostgreSqlTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AzurePostgreSqlTable\",\"typeProperties\":{\"tableName\":\"dataxoqwbztilqbzbwbg\",\"table\":\"datahbhfljeyodpiovnl\",\"schema\":\"datawyakzuafapwxsvde\"},\"description\":\"jiobnirgoextqdn\",\"structure\":\"datagntimz\",\"schema\":\"datapbmtbsetkodsq\",\"linkedServiceName\":{\"referenceName\":\"hedaakghcrzmm\",\"parameters\":{\"mfetqjisjmo\":\"datavdhdgdiwmlgs\",\"vt\":\"datazcaqfkakhgk\",\"ejqaw\":\"dataycvytv\"}},\"parameters\":{\"zbdypbbimhjbozv\":{\"type\":\"Bool\",\"defaultValue\":\"datafzxk\"},\"jcyuzlybqsci\":{\"type\":\"Object\",\"defaultValue\":\"datatclnh\"}},\"annotations\":[\"datalkv\",\"datakcafnwqhawv\"],\"folder\":{\"name\":\"iulby\"},\"\":{\"umwhmxpuck\":\"datajzrycwpb\"}}") - .toObject(AzurePostgreSqlTableDataset.class); - Assertions.assertEquals("jiobnirgoextqdn", model.description()); - Assertions.assertEquals("hedaakghcrzmm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zbdypbbimhjbozv").type()); - Assertions.assertEquals("iulby", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzurePostgreSqlTableDataset model = new AzurePostgreSqlTableDataset().withDescription("jiobnirgoextqdn") - .withStructure("datagntimz") - .withSchema("datapbmtbsetkodsq") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hedaakghcrzmm") - .withParameters( - mapOf("mfetqjisjmo", "datavdhdgdiwmlgs", "vt", "datazcaqfkakhgk", "ejqaw", "dataycvytv"))) - .withParameters(mapOf("zbdypbbimhjbozv", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafzxk"), "jcyuzlybqsci", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datatclnh"))) - .withAnnotations(Arrays.asList("datalkv", "datakcafnwqhawv")) - .withFolder(new DatasetFolder().withName("iulby")) - .withTableName("dataxoqwbztilqbzbwbg") - .withTable("datahbhfljeyodpiovnl") - 
.withSchemaTypePropertiesSchema("datawyakzuafapwxsvde"); - model = BinaryData.fromObject(model).toObject(AzurePostgreSqlTableDataset.class); - Assertions.assertEquals("jiobnirgoextqdn", model.description()); - Assertions.assertEquals("hedaakghcrzmm", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zbdypbbimhjbozv").type()); - Assertions.assertEquals("iulby", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java deleted file mode 100644 index f72ebc870f16..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzurePostgreSqlTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzurePostgreSqlTableDatasetTypeProperties; - -public final class AzurePostgreSqlTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzurePostgreSqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataastlpsmgo\",\"table\":\"datac\",\"schema\":\"datarvlvvjmx\"}") - .toObject(AzurePostgreSqlTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzurePostgreSqlTableDatasetTypeProperties model - = new AzurePostgreSqlTableDatasetTypeProperties().withTableName("dataastlpsmgo") - .withTable("datac") - .withSchema("datarvlvvjmx"); - model = BinaryData.fromObject(model).toObject(AzurePostgreSqlTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java deleted file mode 100644 index 3e39b053220e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureQueueSinkTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureQueueSink; - -public final class AzureQueueSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureQueueSink model = BinaryData.fromString( - "{\"type\":\"AzureQueueSink\",\"writeBatchSize\":\"datahehhtltwv\",\"writeBatchTimeout\":\"datadrfqyqvhzkfyv\",\"sinkRetryCount\":\"dataenozlgxqfghlosh\",\"sinkRetryWait\":\"datamkpcmtsbande\",\"maxConcurrentConnections\":\"datalvuewrljmlod\",\"disableMetricsCollection\":\"datazvtfyqeogwbs\",\"\":{\"grgaosttbw\":\"datawxhcygfgqmdbaz\",\"ydsdmacydqacgyvw\":\"datapoyanir\"}}") - .toObject(AzureQueueSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureQueueSink model = new AzureQueueSink().withWriteBatchSize("datahehhtltwv") - .withWriteBatchTimeout("datadrfqyqvhzkfyv") - .withSinkRetryCount("dataenozlgxqfghlosh") - .withSinkRetryWait("datamkpcmtsbande") - .withMaxConcurrentConnections("datalvuewrljmlod") - .withDisableMetricsCollection("datazvtfyqeogwbs"); - model = BinaryData.fromObject(model).toObject(AzureQueueSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java deleted file mode 100644 index 4cdec3d93ee1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSearchIndexDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureSearchIndexDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSearchIndexDataset model = BinaryData.fromString( - "{\"type\":\"AzureSearchIndex\",\"typeProperties\":{\"indexName\":\"dataki\"},\"description\":\"oifm\",\"structure\":\"datalpwdjraurf\",\"schema\":\"dataznurttu\",\"linkedServiceName\":{\"referenceName\":\"aaaxx\",\"parameters\":{\"ecxstowa\":\"datajmdkqtxfrm\",\"tjaqgbl\":\"dataehxuihwes\",\"spsaneyvaerpiob\":\"datakncypmte\"}},\"parameters\":{\"raq\":{\"type\":\"Float\",\"defaultValue\":\"datarjokjwqd\"},\"f\":{\"type\":\"Bool\",\"defaultValue\":\"datappqcaigazwfwl\"},\"odt\":{\"type\":\"Float\",\"defaultValue\":\"datalzs\"},\"ytjwgetfigw\":{\"type\":\"Array\",\"defaultValue\":\"datarslzymqxserwycu\"}},\"annotations\":[\"datajxzi\",\"dataebjrahgdstubwg\"],\"folder\":{\"name\":\"sshxliqm\"},\"\":{\"alw\":\"datawhfmdoiiyobqzw\",\"crmvjfmr\":\"datasofxc\",\"elsxfkzr\":\"datauydldp\",\"rjwbuocqflm\":\"datatirjvqxvwkiocxo\"}}") - .toObject(AzureSearchIndexDataset.class); - Assertions.assertEquals("oifm", model.description()); - Assertions.assertEquals("aaaxx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("raq").type()); - Assertions.assertEquals("sshxliqm", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - AzureSearchIndexDataset model - = new AzureSearchIndexDataset().withDescription("oifm") - .withStructure("datalpwdjraurf") - .withSchema("dataznurttu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aaaxx") - .withParameters(mapOf("ecxstowa", "datajmdkqtxfrm", "tjaqgbl", "dataehxuihwes", "spsaneyvaerpiob", - "datakncypmte"))) - .withParameters(mapOf("raq", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarjokjwqd"), "f", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datappqcaigazwfwl"), - "odt", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datalzs"), - "ytjwgetfigw", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datarslzymqxserwycu"))) - .withAnnotations(Arrays.asList("datajxzi", "dataebjrahgdstubwg")) - .withFolder(new DatasetFolder().withName("sshxliqm")) - .withIndexName("dataki"); - model = BinaryData.fromObject(model).toObject(AzureSearchIndexDataset.class); - Assertions.assertEquals("oifm", model.description()); - Assertions.assertEquals("aaaxx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("raq").type()); - Assertions.assertEquals("sshxliqm", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java deleted file mode 100644 index d2591ba06eaa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureSearchIndexDatasetTypeProperties; - -public final class AzureSearchIndexDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSearchIndexDatasetTypeProperties model = BinaryData.fromString("{\"indexName\":\"datalrlqxbctatezyozd\"}") - .toObject(AzureSearchIndexDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSearchIndexDatasetTypeProperties model - = new AzureSearchIndexDatasetTypeProperties().withIndexName("datalrlqxbctatezyozd"); - model = BinaryData.fromObject(model).toObject(AzureSearchIndexDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java deleted file mode 100644 index d413f0d418fb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSearchIndexSinkTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSearchIndexSink; -import com.azure.resourcemanager.datafactory.models.AzureSearchIndexWriteBehaviorType; -import org.junit.jupiter.api.Assertions; - -public final class AzureSearchIndexSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSearchIndexSink model = BinaryData.fromString( - "{\"type\":\"AzureSearchIndexSink\",\"writeBehavior\":\"Merge\",\"writeBatchSize\":\"datatt\",\"writeBatchTimeout\":\"datay\",\"sinkRetryCount\":\"datavkkuzrvcegyz\",\"sinkRetryWait\":\"datahcfuwmx\",\"maxConcurrentConnections\":\"datazumklroogflhho\",\"disableMetricsCollection\":\"dataxblyokjwss\",\"\":{\"gg\":\"datatwoukdhnfer\",\"wgqgc\":\"dataihnzvoeh\",\"zjxouxigdwpgmh\":\"datakghg\"}}") - .toObject(AzureSearchIndexSink.class); - Assertions.assertEquals(AzureSearchIndexWriteBehaviorType.MERGE, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSearchIndexSink model = new AzureSearchIndexSink().withWriteBatchSize("datatt") - .withWriteBatchTimeout("datay") - .withSinkRetryCount("datavkkuzrvcegyz") - .withSinkRetryWait("datahcfuwmx") - .withMaxConcurrentConnections("datazumklroogflhho") - .withDisableMetricsCollection("dataxblyokjwss") - 
.withWriteBehavior(AzureSearchIndexWriteBehaviorType.MERGE); - model = BinaryData.fromObject(model).toObject(AzureSearchIndexSink.class); - Assertions.assertEquals(AzureSearchIndexWriteBehaviorType.MERGE, model.writeBehavior()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java deleted file mode 100644 index 20cd707350c7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSqlDWTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureSqlDWTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlDWTableDataset model = BinaryData.fromString( - 
"{\"type\":\"AzureSqlDWTable\",\"typeProperties\":{\"tableName\":\"datatnxodwxm\",\"schema\":\"datajwiygmgsevmdmze\",\"table\":\"datarstgfczljdnc\"},\"description\":\"tjva\",\"structure\":\"datayznm\",\"schema\":\"datacdo\",\"linkedServiceName\":{\"referenceName\":\"cvucgytoxu\",\"parameters\":{\"bgszplusdek\":\"datatnzqsaqm\",\"kyejidbdq\":\"datadzzmssgpgv\"}},\"parameters\":{\"wztlvvwsnmrkky\":{\"type\":\"String\",\"defaultValue\":\"datay\"},\"auxuvavcpfpdofu\":{\"type\":\"Float\",\"defaultValue\":\"dataepwpwfk\"},\"lfngojfsqebuuxj\":{\"type\":\"Bool\",\"defaultValue\":\"datalbtxluevsolzw\"}},\"annotations\":[\"dataxfjwp\",\"datakktpmbmxbmbr\"],\"folder\":{\"name\":\"zx\"},\"\":{\"mhweqjfyxyd\":\"datakhxsdplaumy\",\"vxakglh\":\"datatokvqbvwglgwz\",\"fga\":\"datases\"}}") - .toObject(AzureSqlDWTableDataset.class); - Assertions.assertEquals("tjva", model.description()); - Assertions.assertEquals("cvucgytoxu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("wztlvvwsnmrkky").type()); - Assertions.assertEquals("zx", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlDWTableDataset model = new AzureSqlDWTableDataset().withDescription("tjva") - .withStructure("datayznm") - .withSchema("datacdo") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cvucgytoxu") - .withParameters(mapOf("bgszplusdek", "datatnzqsaqm", "kyejidbdq", "datadzzmssgpgv"))) - .withParameters(mapOf("wztlvvwsnmrkky", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datay"), - "auxuvavcpfpdofu", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataepwpwfk"), - "lfngojfsqebuuxj", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalbtxluevsolzw"))) - .withAnnotations(Arrays.asList("dataxfjwp", "datakktpmbmxbmbr")) - .withFolder(new 
DatasetFolder().withName("zx")) - .withTableName("datatnxodwxm") - .withSchemaTypePropertiesSchema("datajwiygmgsevmdmze") - .withTable("datarstgfczljdnc"); - model = BinaryData.fromObject(model).toObject(AzureSqlDWTableDataset.class); - Assertions.assertEquals("tjva", model.description()); - Assertions.assertEquals("cvucgytoxu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("wztlvvwsnmrkky").type()); - Assertions.assertEquals("zx", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java deleted file mode 100644 index fcada9d31483..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlDWTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlDWTableDatasetTypeProperties; - -public final class AzureSqlDWTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlDWTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataiydvxc\",\"schema\":\"datahyhgo\",\"table\":\"datasoy\"}") - .toObject(AzureSqlDWTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlDWTableDatasetTypeProperties model - = new AzureSqlDWTableDatasetTypeProperties().withTableName("dataiydvxc") - .withSchema("datahyhgo") - .withTable("datasoy"); - model = BinaryData.fromObject(model).toObject(AzureSqlDWTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java deleted file mode 100644 index a41681fb520d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSqlMITableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureSqlMITableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlMITableDataset model = BinaryData.fromString( - "{\"type\":\"AzureSqlMITable\",\"typeProperties\":{\"tableName\":\"dataifvulxf\",\"schema\":\"datayrvjlgdezvjq\",\"table\":\"datahoyiyaxqvjweiw\"},\"description\":\"zkddnvovb\",\"structure\":\"dataqbmdqrxyglaets\",\"schema\":\"datalw\",\"linkedServiceName\":{\"referenceName\":\"jdtlriefooyycux\",\"parameters\":{\"ipsdudgcozzomehx\":\"datac\",\"lczipvwdtgc\":\"dataantolamlbijuxkq\",\"kwx\":\"datazdqiqdlrat\",\"lhpryjfzihuio\":\"dataauwxsuykznhrfgsl\"}},\"parameters\":{\"kbdgddkrh\":{\"type\":\"Bool\",\"defaultValue\":\"dataajtfey\"},\"kzwtjwwguzytij\":{\"type\":\"Bool\",\"defaultValue\":\"dataxbeuuqu\"}},\"annotations\":[\"datanondegjdyd\"],\"folder\":{\"name\":\"kkbjuckcatuq\"},\"\":{\"lyseidtoak\":\"dataowcnxtpz\",\"rytgrhzbqfdpfawr\":\"datat\",\"gdirda\":\"datatvcshtkutzcttb\",\"zjgcfjfx\":\"datam\"}}") - .toObject(AzureSqlMITableDataset.class); - Assertions.assertEquals("zkddnvovb", model.description()); - Assertions.assertEquals("jdtlriefooyycux", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kbdgddkrh").type()); - Assertions.assertEquals("kkbjuckcatuq", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - AzureSqlMITableDataset model = new AzureSqlMITableDataset().withDescription("zkddnvovb") - .withStructure("dataqbmdqrxyglaets") - .withSchema("datalw") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jdtlriefooyycux") - .withParameters(mapOf("ipsdudgcozzomehx", "datac", "lczipvwdtgc", "dataantolamlbijuxkq", "kwx", - "datazdqiqdlrat", "lhpryjfzihuio", "dataauwxsuykznhrfgsl"))) - .withParameters(mapOf("kbdgddkrh", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataajtfey"), - "kzwtjwwguzytij", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataxbeuuqu"))) - .withAnnotations(Arrays.asList("datanondegjdyd")) - .withFolder(new DatasetFolder().withName("kkbjuckcatuq")) - .withTableName("dataifvulxf") - .withSchemaTypePropertiesSchema("datayrvjlgdezvjq") - .withTable("datahoyiyaxqvjweiw"); - model = BinaryData.fromObject(model).toObject(AzureSqlMITableDataset.class); - Assertions.assertEquals("zkddnvovb", model.description()); - Assertions.assertEquals("jdtlriefooyycux", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("kbdgddkrh").type()); - Assertions.assertEquals("kkbjuckcatuq", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java deleted file mode 100644 index b925075808a6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlMITableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlMITableDatasetTypeProperties; - -public final class AzureSqlMITableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlMITableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datawjjirmu\",\"schema\":\"datagftt\",\"table\":\"dataofgeoagf\"}") - .toObject(AzureSqlMITableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlMITableDatasetTypeProperties model - = new AzureSqlMITableDatasetTypeProperties().withTableName("datawjjirmu") - .withSchema("datagftt") - .withTable("dataofgeoagf"); - model = BinaryData.fromObject(model).toObject(AzureSqlMITableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java deleted file mode 100644 index 12b3284c9585..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlSourceTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSqlSource; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; - -public final class AzureSqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlSource model = BinaryData.fromString( - "{\"type\":\"AzureSqlSource\",\"sqlReaderQuery\":\"datavxea\",\"sqlReaderStoredProcedureName\":\"dataf\",\"storedProcedureParameters\":\"datahw\",\"isolationLevel\":\"dataunptsry\",\"produceAdditionalTypes\":\"dataa\",\"partitionOption\":\"databwbxvsytbx\",\"partitionSettings\":{\"partitionColumnName\":\"databaddlmj\",\"partitionUpperBound\":\"datali\",\"partitionLowerBound\":\"datarc\"},\"queryTimeout\":\"datathluzey\",\"additionalColumns\":\"datalezkyfykmnrea\",\"sourceRetryCount\":\"datawepqegtytby\",\"sourceRetryWait\":\"dataeufutfq\",\"maxConcurrentConnections\":\"datawvnjgjrykshiz\",\"disableMetricsCollection\":\"datasw\",\"\":{\"emeft\":\"dataewvskzwqzwsguipq\"}}") - .toObject(AzureSqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlSource model = new AzureSqlSource().withSourceRetryCount("datawepqegtytby") - 
.withSourceRetryWait("dataeufutfq") - .withMaxConcurrentConnections("datawvnjgjrykshiz") - .withDisableMetricsCollection("datasw") - .withQueryTimeout("datathluzey") - .withAdditionalColumns("datalezkyfykmnrea") - .withSqlReaderQuery("datavxea") - .withSqlReaderStoredProcedureName("dataf") - .withStoredProcedureParameters("datahw") - .withIsolationLevel("dataunptsry") - .withProduceAdditionalTypes("dataa") - .withPartitionOption("databwbxvsytbx") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("databaddlmj") - .withPartitionUpperBound("datali") - .withPartitionLowerBound("datarc")); - model = BinaryData.fromObject(model).toObject(AzureSqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java deleted file mode 100644 index 88dcf46f4f21..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSqlTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureSqlTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureSqlTable\",\"typeProperties\":{\"tableName\":\"dataofuworimmovzwde\",\"schema\":\"datakmvhzfo\",\"table\":\"datanyrvaprtgelg\"},\"description\":\"wikfyaqandmym\",\"structure\":\"dataoqjum\",\"schema\":\"datasfbpbvzop\",\"linkedServiceName\":{\"referenceName\":\"x\",\"parameters\":{\"faxdtnqifbsa\":\"datavsmcwo\"}},\"parameters\":{\"mo\":{\"type\":\"SecureString\",\"defaultValue\":\"datauesu\"},\"boel\":{\"type\":\"Int\",\"defaultValue\":\"datajy\"},\"iuorinikc\":{\"type\":\"Int\",\"defaultValue\":\"datatwfld\"},\"mtmqrx\":{\"type\":\"Int\",\"defaultValue\":\"dataksriw\"}},\"annotations\":[\"datavvyczy\",\"datayubtgmbxiqah\",\"dataagpx\"],\"folder\":{\"name\":\"plnupoyryef\"},\"\":{\"lnomfpb\":\"dataovyzt\"}}") - .toObject(AzureSqlTableDataset.class); - Assertions.assertEquals("wikfyaqandmym", model.description()); - Assertions.assertEquals("x", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("mo").type()); - Assertions.assertEquals("plnupoyryef", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlTableDataset model = new 
AzureSqlTableDataset().withDescription("wikfyaqandmym") - .withStructure("dataoqjum") - .withSchema("datasfbpbvzop") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("x").withParameters(mapOf("faxdtnqifbsa", "datavsmcwo"))) - .withParameters(mapOf("mo", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datauesu"), "boel", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datajy"), "iuorinikc", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatwfld"), "mtmqrx", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataksriw"))) - .withAnnotations(Arrays.asList("datavvyczy", "datayubtgmbxiqah", "dataagpx")) - .withFolder(new DatasetFolder().withName("plnupoyryef")) - .withTableName("dataofuworimmovzwde") - .withSchemaTypePropertiesSchema("datakmvhzfo") - .withTable("datanyrvaprtgelg"); - model = BinaryData.fromObject(model).toObject(AzureSqlTableDataset.class); - Assertions.assertEquals("wikfyaqandmym", model.description()); - Assertions.assertEquals("x", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("mo").type()); - Assertions.assertEquals("plnupoyryef", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java deleted file mode 100644 index dea2f0833cda..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSqlTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureSqlTableDatasetTypeProperties; - -public final class AzureSqlTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSqlTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"dataeegvyieztkutnj\",\"schema\":\"datal\",\"table\":\"datakrehyhtmj\"}") - .toObject(AzureSqlTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSqlTableDatasetTypeProperties model - = new AzureSqlTableDatasetTypeProperties().withTableName("dataeegvyieztkutnj") - .withSchema("datal") - .withTable("datakrehyhtmj"); - model = BinaryData.fromObject(model).toObject(AzureSqlTableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java deleted file mode 100644 index b283639e33f1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureSynapseArtifactsLinkedService; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureSynapseArtifactsLinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSynapseArtifactsLinkedService model = BinaryData.fromString( - 
"{\"type\":\"AzureSynapseArtifacts\",\"typeProperties\":{\"endpoint\":\"dataz\",\"authentication\":\"dataogdjwuybcppdvuo\",\"workspaceResourceId\":\"datavk\"},\"connectVia\":{\"referenceName\":\"mkupbnkcwauyl\",\"parameters\":{\"qzylempahyuxxe\":\"dataskrpyfrtluka\",\"rnhq\":\"datakeonp\"}},\"description\":\"uvvysphjkxgfmes\",\"parameters\":{\"oorvzhdncqcuwtp\":{\"type\":\"Bool\",\"defaultValue\":\"datafynvckxtanl\"}},\"annotations\":[\"dataq\",\"datanolspvxpiegxlzd\",\"dataatptzkmfvdrkcw\"],\"\":{\"frzzxirxxkmozkd\":\"datanwsffiahf\",\"vpyznjwm\":\"datazhrhki\",\"bzzwsesqytktvof\":\"datauuzny\"}}") - .toObject(AzureSynapseArtifactsLinkedService.class); - Assertions.assertEquals("mkupbnkcwauyl", model.connectVia().referenceName()); - Assertions.assertEquals("uvvysphjkxgfmes", model.description()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("oorvzhdncqcuwtp").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSynapseArtifactsLinkedService model = new AzureSynapseArtifactsLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("mkupbnkcwauyl") - .withParameters(mapOf("qzylempahyuxxe", "dataskrpyfrtluka", "rnhq", "datakeonp"))) - .withDescription("uvvysphjkxgfmes") - .withParameters(mapOf("oorvzhdncqcuwtp", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafynvckxtanl"))) - .withAnnotations(Arrays.asList("dataq", "datanolspvxpiegxlzd", "dataatptzkmfvdrkcw")) - .withEndpoint("dataz") - .withAuthentication("dataogdjwuybcppdvuo") - .withWorkspaceResourceId("datavk"); - model = BinaryData.fromObject(model).toObject(AzureSynapseArtifactsLinkedService.class); - Assertions.assertEquals("mkupbnkcwauyl", model.connectVia().referenceName()); - Assertions.assertEquals("uvvysphjkxgfmes", model.description()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("oorvzhdncqcuwtp").type()); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java deleted file mode 100644 index 6e5d2d1f6112..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureSynapseArtifactsLinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureSynapseArtifactsLinkedServiceTypeProperties; - -public final class AzureSynapseArtifactsLinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureSynapseArtifactsLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"endpoint\":\"dataxorhsxcsoaxcme\",\"authentication\":\"datapxtbxyn\",\"workspaceResourceId\":\"databwkdi\"}") - .toObject(AzureSynapseArtifactsLinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureSynapseArtifactsLinkedServiceTypeProperties model - = new AzureSynapseArtifactsLinkedServiceTypeProperties().withEndpoint("dataxorhsxcsoaxcme") - .withAuthentication("datapxtbxyn") - .withWorkspaceResourceId("databwkdi"); - model = BinaryData.fromObject(model).toObject(AzureSynapseArtifactsLinkedServiceTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java deleted file mode 100644 index a78ab82d3eb8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class AzureTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureTableDataset model = BinaryData.fromString( - "{\"type\":\"AzureTable\",\"typeProperties\":{\"tableName\":\"databtsertoxa\"},\"description\":\"xuvj\",\"structure\":\"datamnrqstjcmetwml\",\"schema\":\"datacvnpv\",\"linkedServiceName\":{\"referenceName\":\"dhgjnaqyqi\",\"parameters\":{\"svmwbi\":\"datazmvc\",\"qich\":\"dataekdtfobvfiyb\",\"cv\":\"dataybfzdqekivycp\"}},\"parameters\":{\"ryfmxmdu\":{\"type\":\"SecureString\",\"defaultValue\":\"datarqtbhtrezpzl\"},\"gdpri\":{\"type\":\"Object\",\"defaultValue\":\"datakgilnoudccgnd\"}},\"annotations\":[\"dataqyeqfcbuulpyuflq\",\"datafsh\"],\"folder\":{\"name\":\"cyohigimwdcs\"},\"\":{\"kixnmbzmecu\":\"dataqnq\",\"rietvfp\":\"datarzwimbzayspz\",\"gtpvdva\":\"datamdzxp\",\"kkzovlzdm\":\"datalzmgschnzrs\"}}") - .toObject(AzureTableDataset.class); - Assertions.assertEquals("xuvj", model.description()); - Assertions.assertEquals("dhgjnaqyqi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ryfmxmdu").type()); - Assertions.assertEquals("cyohigimwdcs", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureTableDataset model - = new AzureTableDataset().withDescription("xuvj") - 
.withStructure("datamnrqstjcmetwml") - .withSchema("datacvnpv") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dhgjnaqyqi") - .withParameters(mapOf("svmwbi", "datazmvc", "qich", "dataekdtfobvfiyb", "cv", "dataybfzdqekivycp"))) - .withParameters(mapOf("ryfmxmdu", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datarqtbhtrezpzl"), - "gdpri", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakgilnoudccgnd"))) - .withAnnotations(Arrays.asList("dataqyeqfcbuulpyuflq", "datafsh")) - .withFolder(new DatasetFolder().withName("cyohigimwdcs")) - .withTableName("databtsertoxa"); - model = BinaryData.fromObject(model).toObject(AzureTableDataset.class); - Assertions.assertEquals("xuvj", model.description()); - Assertions.assertEquals("dhgjnaqyqi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ryfmxmdu").type()); - Assertions.assertEquals("cyohigimwdcs", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java deleted file mode 100644 index fce7019bb313..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.AzureTableDatasetTypeProperties; - -public final class AzureTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datafmfws\"}").toObject(AzureTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureTableDatasetTypeProperties model = new AzureTableDatasetTypeProperties().withTableName("datafmfws"); - model = BinaryData.fromObject(model).toObject(AzureTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java deleted file mode 100644 index 47d7435d0f55..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/AzureTableSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.AzureTableSource; - -public final class AzureTableSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AzureTableSource model = BinaryData.fromString( - "{\"type\":\"AzureTableSource\",\"azureTableSourceQuery\":\"datacvtelmdrmasvg\",\"azureTableSourceIgnoreTableNotFound\":\"datahlbkquhmb\",\"queryTimeout\":\"dataismviaa\",\"additionalColumns\":\"dataexsrglxljuyv\",\"sourceRetryCount\":\"datapovzespdip\",\"sourceRetryWait\":\"dataqnpdjomd\",\"maxConcurrentConnections\":\"datadwosjxywwvilkyht\",\"disableMetricsCollection\":\"dataiyhdbbjgsjmcybrp\",\"\":{\"xtkghrrxau\":\"datan\"}}") - .toObject(AzureTableSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AzureTableSource model = new AzureTableSource().withSourceRetryCount("datapovzespdip") - .withSourceRetryWait("dataqnpdjomd") - .withMaxConcurrentConnections("datadwosjxywwvilkyht") - .withDisableMetricsCollection("dataiyhdbbjgsjmcybrp") - .withQueryTimeout("dataismviaa") - .withAdditionalColumns("dataexsrglxljuyv") - .withAzureTableSourceQuery("datacvtelmdrmasvg") - .withAzureTableSourceIgnoreTableNotFound("datahlbkquhmb"); - model = BinaryData.fromObject(model).toObject(AzureTableSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java deleted file mode 100644 index 7f317abaebe4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BigDataPoolParametrizationReferenceTests.java +++ /dev/null @@ -1,29 +0,0 
@@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; -import com.azure.resourcemanager.datafactory.models.BigDataPoolReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class BigDataPoolParametrizationReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BigDataPoolParametrizationReference model - = BinaryData.fromString("{\"type\":\"BigDataPoolReference\",\"referenceName\":\"dataieary\"}") - .toObject(BigDataPoolParametrizationReference.class); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BigDataPoolParametrizationReference model - = new BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) - .withReferenceName("dataieary"); - model = BinaryData.fromObject(model).toObject(BigDataPoolParametrizationReference.class); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java deleted file mode 100644 index 0151b43d46d9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTests.java +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BinaryDataset; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class BinaryDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BinaryDataset model = BinaryData.fromString( - "{\"type\":\"Binary\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datadmac\",\"fileName\":\"datakvnrpbjrmvgoqpl\",\"\":{\"llcz\":\"dataumkz\"}},\"compression\":{\"type\":\"datarwnhkgqggoxsst\",\"level\":\"datavrak\",\"\":{\"emjpequ\":\"dataynjcwmhlymgnukxr\"}}},\"description\":\"lzaudg\",\"structure\":\"dataf\",\"schema\":\"datalakkucddwnhcz\",\"linkedServiceName\":{\"referenceName\":\"utoucgjt\",\"parameters\":{\"icqqw\":\"dataway\",\"wkslvlized\":\"datav\"}},\"parameters\":{\"srgekzyqxadyfhb\":{\"type\":\"Float\",\"defaultValue\":\"datav\"},\"aqjsgyzstujr\":{\"type\":\"Float\",\"defaultValue\":\"datahojqttbspvkhg\"},\"fdrld\":{\"type\":\"Float\",\"defaultValue\":\"datakn\"}},\"annotations\":[\"datah\"],\"folder\":{\"name\":\"ttugyu\"},\"\":{\"urf\":\"datalda\",\"zciggbnvtxofwa\":\"dataazcsozjvx\",\"moeobdoe\":\"datazyxwhoe\"}}") - .toObject(BinaryDataset.class); - Assertions.assertEquals("lzaudg", model.description()); - Assertions.assertEquals("utoucgjt", 
model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("srgekzyqxadyfhb").type()); - Assertions.assertEquals("ttugyu", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BinaryDataset model = new BinaryDataset().withDescription("lzaudg") - .withStructure("dataf") - .withSchema("datalakkucddwnhcz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("utoucgjt") - .withParameters(mapOf("icqqw", "dataway", "wkslvlized", "datav"))) - .withParameters(mapOf("srgekzyqxadyfhb", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datav"), "aqjsgyzstujr", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datahojqttbspvkhg"), - "fdrld", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakn"))) - .withAnnotations(Arrays.asList("datah")) - .withFolder(new DatasetFolder().withName("ttugyu")) - .withLocation(new DatasetLocation().withFolderPath("datadmac") - .withFileName("datakvnrpbjrmvgoqpl") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withCompression(new DatasetCompression().withType("datarwnhkgqggoxsst") - .withLevel("datavrak") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(BinaryDataset.class); - Assertions.assertEquals("lzaudg", model.description()); - Assertions.assertEquals("utoucgjt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("srgekzyqxadyfhb").type()); - Assertions.assertEquals("ttugyu", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java deleted file mode 100644 index 2a51394b7c7c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryDatasetTypePropertiesTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.BinaryDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import java.util.HashMap; -import java.util.Map; - -public final class BinaryDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BinaryDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datafpnimtwuuhaueg\",\"fileName\":\"datawmnfeubtzjyrkwfu\",\"\":{\"umnqdurhzzfopu\":\"datahrrk\",\"opwnib\":\"dataoqusvwlu\"}},\"compression\":{\"type\":\"datatoztjd\",\"level\":\"datamqvfm\",\"\":{\"rbelfnzz\":\"dataddtgctxegtvgwy\"}}}") - .toObject(BinaryDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BinaryDatasetTypeProperties model = new 
BinaryDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datafpnimtwuuhaueg") - .withFileName("datawmnfeubtzjyrkwfu") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withCompression(new DatasetCompression().withType("datatoztjd") - .withLevel("datamqvfm") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(BinaryDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java deleted file mode 100644 index 781c86cb7676..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinaryReadSettingsTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BinaryReadSettings; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class BinaryReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BinaryReadSettings model = BinaryData.fromString( - "{\"type\":\"BinaryReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"kucarwnpw\":\"datatbxruuuyaarrrgjn\",\"theaxqyhflnlut\":\"datachlabttxfi\"}},\"\":{\"ukqurrtcf\":\"dataslell\"}}") - .toObject(BinaryReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BinaryReadSettings model = new BinaryReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); - model = BinaryData.fromObject(model).toObject(BinaryReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java deleted file mode 100644 index 0ea335c834c1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySinkTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BinarySink; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class BinarySinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BinarySink model = BinaryData.fromString( - "{\"type\":\"BinarySink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataaeiepvjr\",\"disableMetricsCollection\":\"dataksx\",\"copyBehavior\":\"datakb\",\"metadata\":[{\"name\":\"datawokrhh\",\"value\":\"dataahrmuw\"},{\"name\":\"datadyruo\",\"value\":\"datadtx\"},{\"name\":\"datanmjimgg\",\"value\":\"dataqgpldrn\"}],\"\":{\"etzufkvxe\":\"datadbnfbms\"}},\"writeBatchSize\":\"dataddrtngdc\",\"writeBatchTimeout\":\"datajzgzaeuu\",\"sinkRetryCount\":\"datavheqzl\",\"sinkRetryWait\":\"datavaskrgoodfhpyue\",\"maxConcurrentConnections\":\"dataynyddpjlizl\",\"disableMetricsCollection\":\"datahtcuglgmfz\",\"\":{\"lkqmznkcwiokuyk\":\"datalaflsjwa\",\"htcoelx\":\"datavxmobnehbb\",\"veuog\":\"datafnpxumgnjmsk\"}}") - .toObject(BinarySink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BinarySink model = new BinarySink().withWriteBatchSize("dataddrtngdc") - .withWriteBatchTimeout("datajzgzaeuu") - .withSinkRetryCount("datavheqzl") - .withSinkRetryWait("datavaskrgoodfhpyue") - .withMaxConcurrentConnections("dataynyddpjlizl") - .withDisableMetricsCollection("datahtcuglgmfz") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataaeiepvjr") - .withDisableMetricsCollection("dataksx") - .withCopyBehavior("datakb") - 
.withMetadata(Arrays.asList(new MetadataItem().withName("datawokrhh").withValue("dataahrmuw"), - new MetadataItem().withName("datadyruo").withValue("datadtx"), - new MetadataItem().withName("datanmjimgg").withValue("dataqgpldrn"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))); - model = BinaryData.fromObject(model).toObject(BinarySink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java deleted file mode 100644 index 141b6404b5e7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BinarySourceTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BinaryReadSettings; -import com.azure.resourcemanager.datafactory.models.BinarySource; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class BinarySourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BinarySource model = BinaryData.fromString( - "{\"type\":\"BinarySource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataaafkvqhbw\",\"disableMetricsCollection\":\"dataivqkyaghf\",\"\":{\"ve\":\"datalsz\",\"jpn\":\"datauxax\",\"di\":\"datambjr\"}},\"formatSettings\":{\"type\":\"BinaryReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"etidne\":\"datalzijosuzegmcmlzm\"}},\"\":{\"gowdavpqyhax\":\"datajgwnmxc\",\"gkwpbnefabgt\":\"dataorzozf\",\"ugddycfyfau\":\"dataggoppmxcm\"}},\"sourceRetryCount\":\"datamzq\",\"sourceRetryWait\":\"datawkesxvzcxxf\",\"maxConcurrentConnections\":\"dataj\",\"disableMetricsCollection\":\"dataqwbrzkmgyl\",\"\":{\"orrecoiqwnqliz\":\"dataxm\"}}") - .toObject(BinarySource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BinarySource model = new BinarySource().withSourceRetryCount("datamzq") - .withSourceRetryWait("datawkesxvzcxxf") - .withMaxConcurrentConnections("dataj") - .withDisableMetricsCollection("dataqwbrzkmgyl") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataaafkvqhbw") - .withDisableMetricsCollection("dataivqkyaghf") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new BinaryReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", 
"CompressionReadSettings")))); - model = BinaryData.fromObject(model).toObject(BinarySource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java deleted file mode 100644 index a41181d4c9ba..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BlobEventsTrigger; -import com.azure.resourcemanager.datafactory.models.BlobEventTypes; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class BlobEventsTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobEventsTrigger model = BinaryData.fromString( - "{\"type\":\"BlobEventsTrigger\",\"typeProperties\":{\"blobPathBeginsWith\":\"zckgbpysgzgiv\",\"blobPathEndsWith\":\"hektw\",\"ignoreEmptyBlobs\":true,\"events\":[\"Microsoft.Storage.BlobCreated\"],\"scope\":\"comjxxja\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"dhrkhfyaxi\",\"name\":\"nzsimbgvrksjjq\"},\"parameters\":{\"p\":\"datarbogzwwyub\"}},{\"pipelineReference\":{\"referenceName\":\"cjy\",\"name\":\"emgbkjxuxm\"},\"parameters\":{\"ekpgllezvrvjws\":\"databyjfeanbnw\",\"jbsvk\":\"datafkzlv\"}}],\"description\":\"ynvguhqugnqs\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datauwhmncewcfins\",\"dataimxxsy\",\"datatpqgxzogclu\"],\"\":{\"tcer\":\"datanckdxflgji\",\"errpal\":\"datal\"}}") - .toObject(BlobEventsTrigger.class); - Assertions.assertEquals("ynvguhqugnqs", model.description()); - Assertions.assertEquals("dhrkhfyaxi", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("nzsimbgvrksjjq", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("zckgbpysgzgiv", model.blobPathBeginsWith()); - Assertions.assertEquals("hektw", model.blobPathEndsWith()); - Assertions.assertEquals(true, model.ignoreEmptyBlobs()); - Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, 
model.events().get(0)); - Assertions.assertEquals("comjxxja", model.scope()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobEventsTrigger model = new BlobEventsTrigger().withDescription("ynvguhqugnqs") - .withAnnotations(Arrays.asList("datauwhmncewcfins", "dataimxxsy", "datatpqgxzogclu")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("dhrkhfyaxi").withName("nzsimbgvrksjjq")) - .withParameters(mapOf("p", "datarbogzwwyub")), - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("cjy").withName("emgbkjxuxm")) - .withParameters(mapOf("ekpgllezvrvjws", "databyjfeanbnw", "jbsvk", "datafkzlv")))) - .withBlobPathBeginsWith("zckgbpysgzgiv") - .withBlobPathEndsWith("hektw") - .withIgnoreEmptyBlobs(true) - .withEvents(Arrays.asList(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED)) - .withScope("comjxxja"); - model = BinaryData.fromObject(model).toObject(BlobEventsTrigger.class); - Assertions.assertEquals("ynvguhqugnqs", model.description()); - Assertions.assertEquals("dhrkhfyaxi", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("nzsimbgvrksjjq", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("zckgbpysgzgiv", model.blobPathBeginsWith()); - Assertions.assertEquals("hektw", model.blobPathEndsWith()); - Assertions.assertEquals(true, model.ignoreEmptyBlobs()); - Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, model.events().get(0)); - Assertions.assertEquals("comjxxja", model.scope()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java deleted file mode 100644 index 97f577e4b3e9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobEventsTriggerTypePropertiesTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.BlobEventsTriggerTypeProperties; -import com.azure.resourcemanager.datafactory.models.BlobEventTypes; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class BlobEventsTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobEventsTriggerTypeProperties model = BinaryData.fromString( - "{\"blobPathBeginsWith\":\"rsbgjjuhzfjmnaby\",\"blobPathEndsWith\":\"chhkwlmittpbi\",\"ignoreEmptyBlobs\":false,\"events\":[\"Microsoft.Storage.BlobCreated\",\"Microsoft.Storage.BlobCreated\"],\"scope\":\"nvybxplbdazsj\"}") - .toObject(BlobEventsTriggerTypeProperties.class); - Assertions.assertEquals("rsbgjjuhzfjmnaby", model.blobPathBeginsWith()); - Assertions.assertEquals("chhkwlmittpbi", model.blobPathEndsWith()); - Assertions.assertEquals(false, model.ignoreEmptyBlobs()); - 
Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, model.events().get(0)); - Assertions.assertEquals("nvybxplbdazsj", model.scope()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobEventsTriggerTypeProperties model - = new BlobEventsTriggerTypeProperties().withBlobPathBeginsWith("rsbgjjuhzfjmnaby") - .withBlobPathEndsWith("chhkwlmittpbi") - .withIgnoreEmptyBlobs(false) - .withEvents(Arrays.asList(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, - BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED)) - .withScope("nvybxplbdazsj"); - model = BinaryData.fromObject(model).toObject(BlobEventsTriggerTypeProperties.class); - Assertions.assertEquals("rsbgjjuhzfjmnaby", model.blobPathBeginsWith()); - Assertions.assertEquals("chhkwlmittpbi", model.blobPathEndsWith()); - Assertions.assertEquals(false, model.ignoreEmptyBlobs()); - Assertions.assertEquals(BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED, model.events().get(0)); - Assertions.assertEquals("nvybxplbdazsj", model.scope()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java deleted file mode 100644 index a9bf2a7bb305..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSinkTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BlobSink; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class BlobSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobSink model = BinaryData.fromString( - "{\"type\":\"BlobSink\",\"blobWriterOverwriteFiles\":\"dataow\",\"blobWriterDateTimeFormat\":\"datadefmebpalzpy\",\"blobWriterAddHeader\":\"datagwmrzrhcf\",\"copyBehavior\":\"datavmptnrzilvc\",\"metadata\":[{\"name\":\"datazwtlgoravo\",\"value\":\"datapnxpufvg\"}],\"writeBatchSize\":\"datafgmezfyelfxlbkbh\",\"writeBatchTimeout\":\"datakqfjzgy\",\"sinkRetryCount\":\"datat\",\"sinkRetryWait\":\"datakhgatynkih\",\"maxConcurrentConnections\":\"dataixyb\",\"disableMetricsCollection\":\"datawjzo\",\"\":{\"unvwvaolfg\":\"dataaenlzjxztgdu\",\"zht\":\"datatczzv\",\"chsrp\":\"dataeuiptud\",\"iokdrjdeyfnq\":\"datajkqfabjuaktshwup\"}}") - .toObject(BlobSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobSink model = new BlobSink().withWriteBatchSize("datafgmezfyelfxlbkbh") - .withWriteBatchTimeout("datakqfjzgy") - .withSinkRetryCount("datat") - .withSinkRetryWait("datakhgatynkih") - .withMaxConcurrentConnections("dataixyb") - .withDisableMetricsCollection("datawjzo") - .withBlobWriterOverwriteFiles("dataow") - .withBlobWriterDateTimeFormat("datadefmebpalzpy") - .withBlobWriterAddHeader("datagwmrzrhcf") - .withCopyBehavior("datavmptnrzilvc") - .withMetadata(Arrays.asList(new MetadataItem().withName("datazwtlgoravo").withValue("datapnxpufvg"))); - model = BinaryData.fromObject(model).toObject(BlobSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java deleted file mode 100644 index a62c6981b494..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BlobSource; - -public final class BlobSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobSource model = BinaryData.fromString( - "{\"type\":\"BlobSource\",\"treatEmptyAsNull\":\"datadkhkizyx\",\"skipHeaderLineCount\":\"dataxnhubjwipfry\",\"recursive\":\"datapezzyrpdxyg\",\"sourceRetryCount\":\"dataqxsemezkpd\",\"sourceRetryWait\":\"dataowftfrqebrjopot\",\"maxConcurrentConnections\":\"dataekfxmg\",\"disableMetricsCollection\":\"datawwidnrds\",\"\":{\"pfdwh\":\"datahuiadyhodisypg\"}}") - .toObject(BlobSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobSource model = new BlobSource().withSourceRetryCount("dataqxsemezkpd") - .withSourceRetryWait("dataowftfrqebrjopot") - .withMaxConcurrentConnections("dataekfxmg") - .withDisableMetricsCollection("datawwidnrds") - .withTreatEmptyAsNull("datadkhkizyx") - .withSkipHeaderLineCount("dataxnhubjwipfry") - .withRecursive("datapezzyrpdxyg"); - model = BinaryData.fromObject(model).toObject(BlobSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java deleted file mode 100644 index 68286321767f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.BlobTrigger; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class BlobTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobTrigger model = BinaryData.fromString( - "{\"type\":\"BlobTrigger\",\"typeProperties\":{\"folderPath\":\"odpm\",\"maxConcurrency\":1219528999,\"linkedService\":{\"referenceName\":\"sggneocqaejle\",\"parameters\":{\"d\":\"datadpqwucprpwsga\",\"fwmqi\":\"datacik\",\"ocepjsfhxhulrekr\":\"datalebctor\",\"dnzrcjokgthy\":\"datay\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"rwlguesoivaoryef\",\"name\":\"ovyceksdatjtgm\"},\"parameters\":{\"cqskrj\":\"dataqvindhixddcocs\"}}],\"description\":\"a\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datadrkcsh\",\"dataoxssf\"],\"\":{\"hzgxkwcqpvrrm\":\"dataxqhyy\",\"roqsdvxddsfyl\":\"datak\",\"swnnsb\":\"dataokbriykrxaevbura\",\"ojyn\":\"datakumxbcn\"}}") - .toObject(BlobTrigger.class); - Assertions.assertEquals("a", model.description()); - 
Assertions.assertEquals("rwlguesoivaoryef", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("ovyceksdatjtgm", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("odpm", model.folderPath()); - Assertions.assertEquals(1219528999, model.maxConcurrency()); - Assertions.assertEquals("sggneocqaejle", model.linkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobTrigger model = new BlobTrigger().withDescription("a") - .withAnnotations(Arrays.asList("datadrkcsh", "dataoxssf")) - .withPipelines(Arrays.asList(new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("rwlguesoivaoryef").withName("ovyceksdatjtgm")) - .withParameters(mapOf("cqskrj", "dataqvindhixddcocs")))) - .withFolderPath("odpm") - .withMaxConcurrency(1219528999) - .withLinkedService(new LinkedServiceReference().withReferenceName("sggneocqaejle") - .withParameters(mapOf("d", "datadpqwucprpwsga", "fwmqi", "datacik", "ocepjsfhxhulrekr", "datalebctor", - "dnzrcjokgthy", "datay"))); - model = BinaryData.fromObject(model).toObject(BlobTrigger.class); - Assertions.assertEquals("a", model.description()); - Assertions.assertEquals("rwlguesoivaoryef", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("ovyceksdatjtgm", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("odpm", model.folderPath()); - Assertions.assertEquals(1219528999, model.maxConcurrency()); - Assertions.assertEquals("sggneocqaejle", model.linkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java deleted file mode 100644 index 183b24a68e30..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/BlobTriggerTypePropertiesTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.BlobTriggerTypeProperties; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class BlobTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - BlobTriggerTypeProperties model = BinaryData.fromString( - "{\"folderPath\":\"hbtycfj\",\"maxConcurrency\":861668828,\"linkedService\":{\"referenceName\":\"xiapts\",\"parameters\":{\"vipxzzcxqdrqsu\":\"dataoybpwzniekedxvw\",\"ptzqazwybbewjvyr\":\"dataekzqybpoxqwcusl\",\"osmp\":\"dataownbwrnbmcblmzar\",\"abhpdkrjlwrqheh\":\"dataajx\"}}}") - .toObject(BlobTriggerTypeProperties.class); - Assertions.assertEquals("hbtycfj", model.folderPath()); - Assertions.assertEquals(861668828, model.maxConcurrency()); - Assertions.assertEquals("xiapts", model.linkedService().referenceName()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - BlobTriggerTypeProperties model = new BlobTriggerTypeProperties().withFolderPath("hbtycfj") - .withMaxConcurrency(861668828) - .withLinkedService(new LinkedServiceReference().withReferenceName("xiapts") - .withParameters(mapOf("vipxzzcxqdrqsu", "dataoybpwzniekedxvw", "ptzqazwybbewjvyr", - "dataekzqybpoxqwcusl", "osmp", "dataownbwrnbmcblmzar", "abhpdkrjlwrqheh", "dataajx"))); - model = BinaryData.fromObject(model).toObject(BlobTriggerTypeProperties.class); - Assertions.assertEquals("hbtycfj", model.folderPath()); - Assertions.assertEquals(861668828, model.maxConcurrency()); - Assertions.assertEquals("xiapts", model.linkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java deleted file mode 100644 index 2fb17f1258ff..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CassandraSourceTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CassandraSource; -import com.azure.resourcemanager.datafactory.models.CassandraSourceReadConsistencyLevels; -import org.junit.jupiter.api.Assertions; - -public final class CassandraSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CassandraSource model = BinaryData.fromString( - "{\"type\":\"CassandraSource\",\"query\":\"datahghkf\",\"consistencyLevel\":\"THREE\",\"queryTimeout\":\"datapbawtpwnkiwx\",\"additionalColumns\":\"datalqn\",\"sourceRetryCount\":\"dataozfygvsfafcar\",\"sourceRetryWait\":\"dataml\",\"maxConcurrentConnections\":\"dataqdwocufcs\",\"disableMetricsCollection\":\"datafc\",\"\":{\"ofwqdro\":\"datax\",\"egilbkzctqbvntl\":\"datak\"}}") - .toObject(CassandraSource.class); - Assertions.assertEquals(CassandraSourceReadConsistencyLevels.THREE, model.consistencyLevel()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CassandraSource model = new CassandraSource().withSourceRetryCount("dataozfygvsfafcar") - .withSourceRetryWait("dataml") - .withMaxConcurrentConnections("dataqdwocufcs") - .withDisableMetricsCollection("datafc") - .withQueryTimeout("datapbawtpwnkiwx") - .withAdditionalColumns("datalqn") - .withQuery("datahghkf") - .withConsistencyLevel(CassandraSourceReadConsistencyLevels.THREE); - model = BinaryData.fromObject(model).toObject(CassandraSource.class); - Assertions.assertEquals(CassandraSourceReadConsistencyLevels.THREE, model.consistencyLevel()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java deleted file mode 100644 index e7695a51a4f1..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ChainingTrigger; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ChainingTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChainingTrigger model = BinaryData.fromString( - "{\"type\":\"ChainingTrigger\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"jhhuimgdfo\",\"name\":\"eeu\"},\"parameters\":{\"eplycvw\":\"dataavmd\",\"jwlrits\":\"datawognpuupaqjerq\",\"tjc\":\"datauxregfbffkzpfjmj\"}},\"typeProperties\":{\"dependsOn\":[{\"referenceName\":\"gljrlrkv\",\"name\":\"n\"}],\"runDimension\":\"sxmmpuksvoimdgrf\"},\"description\":\"ekieom\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datawitnihwlpzd\"],\"\":{\"hc\":\"dataghwxpgft\",\"gnwxrokpopp\":\"dataslieplpphfez\"}}") - .toObject(ChainingTrigger.class); - Assertions.assertEquals("ekieom", model.description()); - Assertions.assertEquals("jhhuimgdfo", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("eeu", model.pipeline().pipelineReference().name()); - Assertions.assertEquals("gljrlrkv", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("n", model.dependsOn().get(0).name()); - Assertions.assertEquals("sxmmpuksvoimdgrf", model.runDimension()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - ChainingTrigger model = new ChainingTrigger().withDescription("ekieom") - .withAnnotations(Arrays.asList("datawitnihwlpzd")) - .withPipeline(new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("jhhuimgdfo").withName("eeu")) - .withParameters( - mapOf("eplycvw", "dataavmd", "jwlrits", "datawognpuupaqjerq", "tjc", "datauxregfbffkzpfjmj"))) - .withDependsOn(Arrays.asList(new PipelineReference().withReferenceName("gljrlrkv").withName("n"))) - .withRunDimension("sxmmpuksvoimdgrf"); - model = BinaryData.fromObject(model).toObject(ChainingTrigger.class); - Assertions.assertEquals("ekieom", model.description()); - Assertions.assertEquals("jhhuimgdfo", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("eeu", model.pipeline().pipelineReference().name()); - Assertions.assertEquals("gljrlrkv", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("n", model.dependsOn().get(0).name()); - Assertions.assertEquals("sxmmpuksvoimdgrf", model.runDimension()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java deleted file mode 100644 index 7c38958179a6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChainingTriggerTypePropertiesTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ChainingTriggerTypeProperties; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ChainingTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChainingTriggerTypeProperties model = BinaryData.fromString( - "{\"dependsOn\":[{\"referenceName\":\"vtjrobowhcv\",\"name\":\"mlwadst\"},{\"referenceName\":\"xrgqmuthx\",\"name\":\"dmhypptfppmu\"}],\"runDimension\":\"wvezt\"}") - .toObject(ChainingTriggerTypeProperties.class); - Assertions.assertEquals("vtjrobowhcv", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("mlwadst", model.dependsOn().get(0).name()); - Assertions.assertEquals("wvezt", model.runDimension()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ChainingTriggerTypeProperties model = new ChainingTriggerTypeProperties() - .withDependsOn(Arrays.asList(new PipelineReference().withReferenceName("vtjrobowhcv").withName("mlwadst"), - new PipelineReference().withReferenceName("xrgqmuthx").withName("dmhypptfppmu"))) - .withRunDimension("wvezt"); - model = BinaryData.fromObject(model).toObject(ChainingTriggerTypeProperties.class); - Assertions.assertEquals("vtjrobowhcv", model.dependsOn().get(0).referenceName()); - Assertions.assertEquals("mlwadst", model.dependsOn().get(0).name()); - Assertions.assertEquals("wvezt", model.runDimension()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java deleted file mode 100644 index a4af05236bae..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureFolderTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; -import org.junit.jupiter.api.Assertions; - -public final class ChangeDataCaptureFolderTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChangeDataCaptureFolder model - = BinaryData.fromString("{\"name\":\"tekva\"}").toObject(ChangeDataCaptureFolder.class); - Assertions.assertEquals("tekva", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ChangeDataCaptureFolder model = new ChangeDataCaptureFolder().withName("tekva"); - model = BinaryData.fromObject(model).toObject(ChangeDataCaptureFolder.class); - Assertions.assertEquals("tekva", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java deleted file mode 100644 index 5a5556689bde..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureListResponseTests.java +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCaptureResourceInner; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureListResponse; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperPolicy; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ChangeDataCaptureListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChangeDataCaptureListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"folder\":{\"name\":\"sbs\"},\"description\":\"qqagwwr\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datak\",\"datahltnjadhqoawjq\",\"datayueayfbpcmsp\",\"databyrrueqth\"]}],\"policy\":{\"mode\":\"gnmbscbbxigdhx\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":391572277}},\"allowVNetOverride\":true,\"status\":\"wdpyq\"},\"name\":\"bxubmdna\",\"type\":\"bqwremjela\",\"etag\":\"cigeleohdbvqvw\",\"\":{\"ybxc\":\"dataopwbeonrlkwzd\",\"hkrttzrazis\":\"dataakxcptsoqfyiaseq\",\"vanbwzohmnrxxbs\":\"dataykiue\",\"dptysprqs\":\"datajklinh\"},\"id\":\"zxojpslsv\"},{\"properties\":{\"folder\":{\"name\":\"liufiqwoyxq\"},\"description\":\"pcohhoucqpqojx\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"type\":\"linkedservicetype\"}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{}],\"connection\":{\"type\":\"linkedservicetype\"},\"dataMapperMappings\":[{}],\"relationships\":[\"databcawetz\",\"dataddtjwfljhznam\",\"datauatmzwcjjncqtj\",\"datamizvgbgatzuuvbx\"]}],\"policy\":{\"mode\":\"rebwggahtt\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":2015075078}},\"allowVNetOverride\":false,\"status\":\"utlxjoqzasunwqrj\"},\"name\":\"rg\",\"type\":\"aohcmbuocn\",\"etag\":\"ohmbpyr\",\"\":{\"vkfkmr\":\"datameblyd\"},\"id\":\"xne\"}],\"nextLink\":\"sm\"}") - .toObject(ChangeDataCaptureListResponse.class); - Assertions.assertEquals("zxojpslsv", model.value().get(0).id()); - Assertions.assertEquals("sbs", model.value().get(0).folder().name()); - Assertions.assertEquals("qqagwwr", model.value().get(0).description()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.value().get(0).sourceConnectionsInfo().get(0).connection().type()); - 
Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.value().get(0).targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals("gnmbscbbxigdhx", model.value().get(0).policy().mode()); - Assertions.assertEquals(FrequencyType.MINUTE, model.value().get(0).policy().recurrence().frequency()); - Assertions.assertEquals(391572277, model.value().get(0).policy().recurrence().interval()); - Assertions.assertEquals(true, model.value().get(0).allowVNetOverride()); - Assertions.assertEquals("wdpyq", model.value().get(0).status()); - Assertions.assertEquals("sm", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ChangeDataCaptureListResponse model - = new ChangeDataCaptureListResponse() - .withValue( - Arrays - .asList( - new ChangeDataCaptureResourceInner().withId("zxojpslsv") - .withFolder(new ChangeDataCaptureFolder().withName("sbs")) - .withDescription("qqagwwr") - .withSourceConnectionsInfo(Arrays.asList(new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo(Arrays.asList(new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), - new DataMapperMapping(), new DataMapperMapping(), new DataMapperMapping())) - .withRelationships(Arrays.asList("datak", "datahltnjadhqoawjq", "datayueayfbpcmsp", - "databyrrueqth")))) - .withPolicy(new MapperPolicy().withMode("gnmbscbbxigdhx") - .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE) - .withInterval(391572277))) - .withAllowVNetOverride(true) - .withStatus("wdpyq") - .withAdditionalProperties( - mapOf("name", "bxubmdna", "etag", "cigeleohdbvqvw", "type", "bqwremjela")), - 
new ChangeDataCaptureResourceInner().withId("xne") - .withFolder(new ChangeDataCaptureFolder().withName("liufiqwoyxq")) - .withDescription("pcohhoucqpqojx") - .withSourceConnectionsInfo(Arrays.asList(new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), - new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)))) - .withTargetConnectionsInfo(Arrays.asList(new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection().withType(ConnectionType.LINKEDSERVICETYPE)) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("databcawetz", "dataddtjwfljhznam", - "datauatmzwcjjncqtj", "datamizvgbgatzuuvbx")))) - .withPolicy(new MapperPolicy().withMode("rebwggahtt") - .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR) - .withInterval(2015075078))) - .withAllowVNetOverride(false) - .withStatus("utlxjoqzasunwqrj") - .withAdditionalProperties( - mapOf("name", "rg", "etag", "ohmbpyr", "type", "aohcmbuocn")))) - .withNextLink("sm"); - model = BinaryData.fromObject(model).toObject(ChangeDataCaptureListResponse.class); - Assertions.assertEquals("zxojpslsv", model.value().get(0).id()); - Assertions.assertEquals("sbs", model.value().get(0).folder().name()); - Assertions.assertEquals("qqagwwr", model.value().get(0).description()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.value().get(0).sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.value().get(0).targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals("gnmbscbbxigdhx", model.value().get(0).policy().mode()); - Assertions.assertEquals(FrequencyType.MINUTE, model.value().get(0).policy().recurrence().frequency()); - 
Assertions.assertEquals(391572277, model.value().get(0).policy().recurrence().interval()); - Assertions.assertEquals(true, model.value().get(0).allowVNetOverride()); - Assertions.assertEquals("wdpyq", model.value().get(0).status()); - Assertions.assertEquals("sm", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java deleted file mode 100644 index ef509c8dfa88..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureResourceInnerTests.java +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCaptureResourceInner; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMappings; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperPolicy; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ChangeDataCaptureResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChangeDataCaptureResourceInner model = BinaryData.fromString( - 
"{\"properties\":{\"folder\":{\"name\":\"lfpya\"},\"description\":\"cygvoavyunssx\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"egjlgvvpa\",\"properties\":{}},{\"name\":\"sgb\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"tu\",\"parameters\":{\"fulopmjnlexwhcb\":\"datahgaqipirpiwrq\"}},\"linkedServiceType\":\"ibkeph\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{\"name\":\"oyin\",\"properties\":{}},{\"name\":\"brlc\",\"properties\":{}},{\"name\":\"uczkgofxyfsruc\",\"properties\":{}},{\"name\":\"rpcjttbstvjeaqnr\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"koxmlghk\",\"parameters\":{\"wex\":\"datadvrmazlpd\",\"wvqsgny\":\"datamzvlazipbh\"}},\"linkedServiceType\":\"uzivensrpmeyyvp\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"gsksrfhf\",\"properties\":{}},{\"name\":\"mknbnxwcdommpv\",\"properties\":{}}],\"connection\":{\"linkedService\":{\"referenceName\":\"zfgbrttuiaclkie\",\"parameters\":{\"yuttdiygbpvnwswm\":\"datajlfnthiq\",\"l\":\"dataxkyctwwgzwx\",\"ygzyvneezaifght\":\"dataecvo\"}},\"linkedServiceType\":\"qqtlffhzbkrkj\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{\"targetEntityName\":\"nq\",\"sourceEntityName\":\"wdogiyetesyp\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"dataztjhqtfbovnynkbw\"},{\"targetEntityName\":\"njuhpsprkzya\",\"sourceEntityName\":\"ia\",\"sourceConnectionReference\":{},\"attributeMappingInfo\":{},\"sourceDenormalizeInfo\":\"dataafbwqroohtuovmao\"}],\"relationships\":[\"datajtumgh\",\"datahpv\",\"datacmslclblyjxltbs\"]}],\"policy\":{\"mode\":\"scvsfxigctm\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":45764322}},\"allowVNetOverride\":t
rue,\"status\":\"ccyd\"},\"name\":\"ceukdqkkyihztg\",\"type\":\"mgqzgwldoyc\",\"etag\":\"llcecfehuwaoa\",\"\":{\"lizst\":\"dataicq\"},\"id\":\"sjvh\"}") - .toObject(ChangeDataCaptureResourceInner.class); - Assertions.assertEquals("sjvh", model.id()); - Assertions.assertEquals("lfpya", model.folder().name()); - Assertions.assertEquals("cygvoavyunssx", model.description()); - Assertions.assertEquals("egjlgvvpa", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("tu", - model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ibkeph", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("gsksrfhf", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("zfgbrttuiaclkie", - model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("qqtlffhzbkrkj", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("nq", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("wdogiyetesyp", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("scvsfxigctm", model.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.policy().recurrence().frequency()); - Assertions.assertEquals(45764322, model.policy().recurrence().interval()); - 
Assertions.assertEquals(true, model.allowVNetOverride()); - Assertions.assertEquals("ccyd", model.status()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ChangeDataCaptureResourceInner model = new ChangeDataCaptureResourceInner().withId("sjvh") - .withFolder(new ChangeDataCaptureFolder().withName("lfpya")) - .withDescription("cygvoavyunssx") - .withSourceConnectionsInfo(Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities( - Arrays.asList(new MapperTable().withName("egjlgvvpa"), new MapperTable().withName("sgb"))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("tu") - .withParameters(mapOf("fulopmjnlexwhcb", "datahgaqipirpiwrq"))) - .withLinkedServiceType("ibkeph") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))), - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable().withName("oyin"), - new MapperTable().withName("brlc"), new MapperTable().withName("uczkgofxyfsruc"), - new MapperTable().withName("rpcjttbstvjeaqnr"))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("koxmlghk") - .withParameters(mapOf("wex", "datadvrmazlpd", "wvqsgny", "datamzvlazipbh"))) - .withLinkedServiceType("uzivensrpmeyyvp") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))))) - .withTargetConnectionsInfo( - Arrays - .asList( - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable().withName("gsksrfhf"), - new 
MapperTable().withName("mknbnxwcdommpv"))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("zfgbrttuiaclkie") - .withParameters(mapOf("yuttdiygbpvnwswm", "datajlfnthiq", "l", "dataxkyctwwgzwx", - "ygzyvneezaifght", "dataecvo"))) - .withLinkedServiceType("qqtlffhzbkrkj") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("nq") - .withSourceEntityName("wdogiyetesyp") - .withSourceConnectionReference(new MapperConnectionReference()) - .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("dataztjhqtfbovnynkbw"), - new DataMapperMapping().withTargetEntityName("njuhpsprkzya") - .withSourceEntityName("ia") - .withSourceConnectionReference(new MapperConnectionReference()) - .withAttributeMappingInfo(new MapperAttributeMappings()) - .withSourceDenormalizeInfo("dataafbwqroohtuovmao"))) - .withRelationships(Arrays.asList("datajtumgh", "datahpv", "datacmslclblyjxltbs")))) - .withPolicy(new MapperPolicy().withMode("scvsfxigctm") - .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(45764322))) - .withAllowVNetOverride(true) - .withStatus("ccyd") - .withAdditionalProperties(mapOf("name", "ceukdqkkyihztg", "etag", "llcecfehuwaoa", "type", "mgqzgwldoyc")); - model = BinaryData.fromObject(model).toObject(ChangeDataCaptureResourceInner.class); - Assertions.assertEquals("sjvh", model.id()); - Assertions.assertEquals("lfpya", model.folder().name()); - Assertions.assertEquals("cygvoavyunssx", model.description()); - Assertions.assertEquals("egjlgvvpa", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("tu", - 
model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ibkeph", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("gsksrfhf", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("zfgbrttuiaclkie", - model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("qqtlffhzbkrkj", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("nq", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("wdogiyetesyp", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("scvsfxigctm", model.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.policy().recurrence().frequency()); - Assertions.assertEquals(45764322, model.policy().recurrence().interval()); - Assertions.assertEquals(true, model.allowVNetOverride()); - Assertions.assertEquals("ccyd", model.status()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java deleted file mode 100644 index 5950d88f2a2b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCaptureTests.java +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCapture; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMappings; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperPolicy; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import 
com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ChangeDataCaptureTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ChangeDataCapture model = BinaryData.fromString( - "{\"folder\":{\"name\":\"ftkwq\"},\"description\":\"pmvssehaep\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{\"name\":\"tczhupeuknijd\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"djfbocyv\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"wikdmh\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"hgauacdixmxufrs\",\"parameters\":{\"kfnozoeoqbvj\":\"dataqg\",\"ay\":\"datavefgwbmqjchntas\",\"wyvf\":\"dataxbulpzealbmqkyo\"}},\"linkedServiceType\":\"btsuahxs\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"rrscubiwsdrnp\",\"value\":\"datawodiffjxc\"},{\"name\":\"mmuabwi\",\"value\":\"datajogjonmc\"},{\"name\":\"foyzbamwineof\",\"value\":\"datakak\"},{\"name\":\"ldtve\",\"value\":\"dataoclzhz\"}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{\"name\":\"xgvttxp\",\"properties\":{\"schema\":[{},{},{},{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"mrdixtreki\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"ruffgllukk\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"pqhvmblcouqehbhb\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{},{}]}}],\"connection\":{\"linkedService\"
:{\"referenceName\":\"an\",\"parameters\":{\"xldykalsygaolnjp\":\"datapmbltoormkfql\",\"mr\":\"datanbmjksibjgsjjxxa\",\"yqegx\":\"dataad\",\"inbmh\":\"dataiv\"}},\"linkedServiceType\":\"jijkgqxnhmbke\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"aan\",\"value\":\"datagiycwkdtaawxwfek\"},{\"name\":\"mrrqmbzmqkratb\",\"value\":\"datawbjsidbirkf\"},{\"name\":\"sokdgoge\",\"value\":\"datajymrhbg\"},{\"name\":\"ozkyewnfnzh\",\"value\":\"dataqo\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"jkutycyarnr\",\"sourceEntityName\":\"hguabzoghktdp\",\"sourceConnectionReference\":{\"connectionName\":\"hcoeocnhzq\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datafyjzptwr\"},{\"targetEntityName\":\"h\",\"sourceEntityName\":\"qinfszpyglqd\",\"sourceConnectionReference\":{\"connectionName\":\"jzralc\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{}]},\"sourceDenormalizeInfo\":\"datasjoqcjenkyhfqzvs\"},{\"targetEntityName\":\"fxjelg\",\"sourceEntityName\":\"pzqjhhhq\",\"sourceConnectionReference\":{\"connectionName\":\"yvca\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{},{}]},\"sourceDenormalizeInfo\":\"datas\"},{\"targetEntityName\":\"usjszlbscm\",\"sourceEntityName\":\"zijiufehgmv\",\"sourceConnectionReference\":{\"connectionName\":\"wyvq\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"dataiylylyfw\"}],\"relationships\":[\"datatgqztwhghmup\",\"dataxyjtcdxabbujf\"]},{\"targetEntities\":[{\"name\":\"nbbklqpxzucafed\",\"properties\":{\"schema\":[{},{},{}],\"dslConnectorProperties\":[{}]}},{\"name\":\"fwxudgnhg\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"nbwgpbemeluclv\",\"parameters\":{\"hqfaqnvz\":\"datau
kyrdnqodxahh\",\"emchgavsczuej\":\"dataqgyi\"}},\"linkedServiceType\":\"xptlghwzho\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"liuhqawmoaiancz\",\"value\":\"dataodrrslblxyd\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"vvbxiwkgfbqljnq\",\"sourceEntityName\":\"ychocokulehu\",\"sourceConnectionReference\":{\"connectionName\":\"rqffaweyurk\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{}]},\"sourceDenormalizeInfo\":\"dataav\"},{\"targetEntityName\":\"qdbrxmrgc\",\"sourceEntityName\":\"apx\",\"sourceConnectionReference\":{\"connectionName\":\"fjjkbajb\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datasioycblevpmcl\"}],\"relationships\":[\"dataxkyxlzgs\",\"datagkzz\"]},{\"targetEntities\":[{\"name\":\"hbzffovwmbjlzqs\",\"properties\":{\"schema\":[{},{}],\"dslConnectorProperties\":[{},{}]}},{\"name\":\"napfdqwowftpt\",\"properties\":{\"schema\":[{}],\"dslConnectorProperties\":[{},{},{}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"h\",\"parameters\":{\"hvyeldotj\":\"dataqyhleseyq\"}},\"linkedServiceType\":\"kwiswskukjtas\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"xkdtxfkndlqvtwkn\",\"value\":\"datammbugtywatmqaq\"},{\"name\":\"eatgroeshoy\",\"value\":\"datacbyfqxkf\"},{\"name\":\"ytehqpuvjmvqmt\",\"value\":\"datackygroejnndljdju\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"req\",\"sourceEntityName\":\"kceysfaqegplw\",\"sourceConnectionReference\":{\"connectionName\":\"hwddkvbxgkq\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{},{},{}]},\"sourceDenormalizeInfo\":\"datadacarvvlfnty\"}],\"relationships\":[\"dataoiwenazerohzrsq\"]}],\"policy\":{\"mode\":\"sxkdnwqapfgsdpc\",\"recurrence\":{\"frequency\":\"Minute\",\"interval\":1226216524}},\"allowVNetOverride\":false,\"status\":\"uui
pldq\"}") - .toObject(ChangeDataCapture.class); - Assertions.assertEquals("ftkwq", model.folder().name()); - Assertions.assertEquals("pmvssehaep", model.description()); - Assertions.assertEquals("tczhupeuknijd", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("hgauacdixmxufrs", - model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("btsuahxs", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("rrscubiwsdrnp", - model.sourceConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("xgvttxp", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("an", - model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("jijkgqxnhmbke", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("aan", - model.targetConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("jkutycyarnr", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("hguabzoghktdp", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("hcoeocnhzq", - model.targetConnectionsInfo() - .get(0) - .dataMapperMappings() - .get(0) - .sourceConnectionReference() - 
.connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("sxkdnwqapfgsdpc", model.policy().mode()); - Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); - Assertions.assertEquals(1226216524, model.policy().recurrence().interval()); - Assertions.assertEquals(false, model.allowVNetOverride()); - Assertions.assertEquals("uuipldq", model.status()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ChangeDataCapture model - = new ChangeDataCapture().withFolder(new ChangeDataCaptureFolder().withName("ftkwq")) - .withDescription("pmvssehaep") - .withSourceConnectionsInfo( - Arrays.asList(new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList( - new MapperTable().withName("tczhupeuknijd") - .withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("djfbocyv") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable().withName("wikdmh") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("hgauacdixmxufrs") - .withParameters(mapOf("kfnozoeoqbvj", "dataqg", "ay", "datavefgwbmqjchntas", "wyvf", - "dataxbulpzealbmqkyo"))) - 
.withLinkedServiceType("btsuahxs") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("rrscubiwsdrnp").withValue("datawodiffjxc"), - new MapperDslConnectorProperties().withName("mmuabwi").withValue("datajogjonmc"), - new MapperDslConnectorProperties().withName("foyzbamwineof") - .withValue("datakak"), - new MapperDslConnectorProperties().withName("ldtve").withValue("dataoclzhz")))))) - .withTargetConnectionsInfo(Arrays.asList( - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList( - new MapperTable().withName("xgvttxp") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("mrdixtreki") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("ruffgllukk") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties())), - new MapperTable().withName("pqhvmblcouqehbhb") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("an") - .withParameters(mapOf("xldykalsygaolnjp", "datapmbltoormkfql", "mr", - "datanbmjksibjgsjjxxa", "yqegx", "dataad", "inbmh", "dataiv"))) - .withLinkedServiceType("jijkgqxnhmbke") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - 
.withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("aan").withValue("datagiycwkdtaawxwfek"), - new MapperDslConnectorProperties().withName("mrrqmbzmqkratb") - .withValue("datawbjsidbirkf"), - new MapperDslConnectorProperties().withName("sokdgoge") - .withValue("datajymrhbg"), - new MapperDslConnectorProperties().withName("ozkyewnfnzh").withValue("dataqo")))) - .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("jkutycyarnr") - .withSourceEntityName("hguabzoghktdp") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("hcoeocnhzq") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings( - Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping(), - new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datafyjzptwr"), - new DataMapperMapping().withTargetEntityName("h") - .withSourceEntityName("qinfszpyglqd") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("jzralc") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings( - Arrays.asList(new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datasjoqcjenkyhfqzvs"), - new DataMapperMapping().withTargetEntityName("fxjelg") - .withSourceEntityName("pzqjhhhq") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("yvca") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings().withAttributeMappings(Arrays - .asList(new MapperAttributeMapping(), new MapperAttributeMapping(), - new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datas"), - new DataMapperMapping().withTargetEntityName("usjszlbscm") - .withSourceEntityName("zijiufehgmv") - 
.withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("wyvq") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), - new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("dataiylylyfw"))) - .withRelationships(Arrays.asList("datatgqztwhghmup", "dataxyjtcdxabbujf")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList( - new MapperTable() - .withName("nbbklqpxzucafed") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema(), - new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties())), - new MapperTable().withName("fwxudgnhg") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays - .asList( - new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("nbwgpbemeluclv") - .withParameters(mapOf("hqfaqnvz", "dataukyrdnqodxahh", "emchgavsczuej", "dataqgyi"))) - .withLinkedServiceType("xptlghwzho") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("liuhqawmoaiancz") - .withValue("dataodrrslblxyd")))) - .withDataMapperMappings( - Arrays.asList( - new DataMapperMapping().withTargetEntityName("vvbxiwkgfbqljnq") - .withSourceEntityName("ychocokulehu") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("rqffaweyurk") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping()))) - 
.withSourceDenormalizeInfo("dataav"), - new DataMapperMapping().withTargetEntityName("qdbrxmrgc") - .withSourceEntityName("apx") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("fjjkbajb") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), - new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datasioycblevpmcl"))) - .withRelationships(Arrays.asList("dataxkyxlzgs", "datagkzz")), - new MapperTargetConnectionsInfo() - .withTargetEntities( - Arrays.asList( - new MapperTable().withName("hbzffovwmbjlzqs") - .withSchema(Arrays.asList(new MapperTableSchema(), new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties())), - new MapperTable() - .withName("napfdqwowftpt") - .withSchema(Arrays.asList(new MapperTableSchema())) - .withDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties())))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("h") - .withParameters(mapOf("hvyeldotj", "dataqyhleseyq"))) - .withLinkedServiceType("kwiswskukjtas") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("xkdtxfkndlqvtwkn") - .withValue("datammbugtywatmqaq"), - new MapperDslConnectorProperties().withName("eatgroeshoy").withValue("datacbyfqxkf"), - new MapperDslConnectorProperties() - .withName("ytehqpuvjmvqmt") - .withValue("datackygroejnndljdju")))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping().withTargetEntityName("req") - .withSourceEntityName("kceysfaqegplw") - .withSourceConnectionReference( - new 
MapperConnectionReference().withConnectionName("hwddkvbxgkq") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping(), - new MapperAttributeMapping(), new MapperAttributeMapping()))) - .withSourceDenormalizeInfo("datadacarvvlfnty"))) - .withRelationships(Arrays.asList("dataoiwenazerohzrsq")))) - .withPolicy(new MapperPolicy().withMode("sxkdnwqapfgsdpc") - .withRecurrence( - new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(1226216524))) - .withAllowVNetOverride(false) - .withStatus("uuipldq"); - model = BinaryData.fromObject(model).toObject(ChangeDataCapture.class); - Assertions.assertEquals("ftkwq", model.folder().name()); - Assertions.assertEquals("pmvssehaep", model.description()); - Assertions.assertEquals("tczhupeuknijd", model.sourceConnectionsInfo().get(0).sourceEntities().get(0).name()); - Assertions.assertEquals("hgauacdixmxufrs", - model.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("btsuahxs", model.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, model.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("rrscubiwsdrnp", - model.sourceConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("xgvttxp", model.targetConnectionsInfo().get(0).targetEntities().get(0).name()); - Assertions.assertEquals("an", - model.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("jijkgqxnhmbke", model.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - 
model.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, model.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("aan", - model.targetConnectionsInfo().get(0).connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("jkutycyarnr", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("hguabzoghktdp", - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("hcoeocnhzq", - model.targetConnectionsInfo() - .get(0) - .dataMapperMappings() - .get(0) - .sourceConnectionReference() - .connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.targetConnectionsInfo().get(0).dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("sxkdnwqapfgsdpc", model.policy().mode()); - Assertions.assertEquals(FrequencyType.MINUTE, model.policy().recurrence().frequency()); - Assertions.assertEquals(1226216524, model.policy().recurrence().interval()); - Assertions.assertEquals(false, model.allowVNetOverride()); - Assertions.assertEquals("uuipldq", model.status()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 4482d5ca1a32..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureFolder; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import 
com.azure.resourcemanager.datafactory.models.MapperPolicy; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"folder\":{\"name\":\"ywpednousxrljl\"},\"description\":\"pqkcbflzz\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"irrsufvt\"},\"linkedServiceType\":\"euq\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"wwie\"},\"linkedServiceType\":\"ut\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"lwzatvnejlo\"},\"linkedServiceType\":\"qladlpqlwtxshvo\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"hkvafcjektkg\"},\"linkedServiceType\":\"rifyrap\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"jh\"},\"linkedServiceType\":\"mlweagsx\",\"type\":\"linkedservicetype\",\"is
InlineDataset\":true,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{},{},{},{}],\"relationships\":[\"datappt\",\"datareput\",\"datasdewnkzwyr\"]},{\"targetEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"oiqtvfruyinavbf\"},\"linkedServiceType\":\"vvrzdbrpdveyx\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datadmmxzszhvjf\",\"datajxth\",\"datajbg\",\"dataipc\"]}],\"policy\":{\"mode\":\"yap\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":487776404}},\"allowVNetOverride\":false,\"status\":\"ynuswdwdaugdgvs\"},\"name\":\"s\",\"type\":\"ihjkiajokj\",\"etag\":\"hcrywwfnsrrcj\",\"\":{\"l\":\"datawafjiba\",\"yxrrmnoxjw\":\"datatdije\",\"zvvidokvzqeadk\":\"datahulv\"},\"id\":\"drcxvolt\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ChangeDataCaptureResource response = manager.changeDataCaptures() - .define("uhkhnzsrgi") - .withExistingFactory("qxrbrdpznuyczlyl", "drziaxigeos") - .withSourceConnectionsInfo(Arrays.asList( - new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("brwhsqtzgmf")) - .withLinkedServiceType("ryexhdigmgs") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))), - new 
MapperSourceConnectionsInfo() - .withSourceEntities( - Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("sitzfvzlqspavxfp")) - .withLinkedServiceType("t") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))), - new MapperSourceConnectionsInfo() - .withSourceEntities( - Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("ngwldym")) - .withLinkedServiceType("hvvvrt") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties(), new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))))) - .withTargetConnectionsInfo( - Arrays - .asList( - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("gag")) - .withLinkedServiceType("cmlyhpxf") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships( - Arrays.asList("datanlcitoibgvaazfi", "dataaocfnffjxdcc", "datauzqwvckewlyrw")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new 
MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("bawzafzdzhh")) - .withLinkedServiceType("xcelvawwj") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping())) - .withRelationships(Arrays.asList("datary", "datai", "datanmchsjuacd", "datavr")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("otzpepmlc")) - .withLinkedServiceType("d") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties(), - new MapperDslConnectorProperties(), new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), - new DataMapperMapping())) - .withRelationships(Arrays.asList("datalivk", "dataxwfk", "datan", "dataqfleh")), - new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList(new MapperTable(), new MapperTable(), new MapperTable())) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("xniibcilyg")) - .withLinkedServiceType("i") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays.asList(new MapperDslConnectorProperties()))) - .withDataMapperMappings(Arrays.asList(new DataMapperMapping(), new DataMapperMapping(), - new DataMapperMapping())) - .withRelationships(Arrays.asList("datamwhhig", "datawgqewcvljjhnners", "datartjmdepaunywk", - "dataucsrqfm")))) - .withPolicy(new MapperPolicy().withMode("yeczlxunhntsqsp") - .withRecurrence( - new 
MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(1322060676))) - .withFolder(new ChangeDataCaptureFolder().withName("epglj")) - .withDescription("zqreprnf") - .withAllowVNetOverride(false) - .withStatus("xhottykfkwzk") - .withIfMatch("cy") - .create(); - - Assertions.assertEquals("drcxvolt", response.id()); - Assertions.assertEquals("ywpednousxrljl", response.folder().name()); - Assertions.assertEquals("pqkcbflzz", response.description()); - Assertions.assertEquals("irrsufvt", - response.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("euq", response.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, response.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("jh", - response.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("mlweagsx", response.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, response.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("yap", response.policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, response.policy().recurrence().frequency()); - Assertions.assertEquals(487776404, response.policy().recurrence().interval()); - Assertions.assertEquals(false, response.allowVNetOverride()); - Assertions.assertEquals("ynuswdwdaugdgvs", response.status()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java deleted file mode 100644 index 92bc51225171..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.changeDataCaptures() - .deleteWithResponse("kwkxlnl", "ba", "ycnjxyproqebsuij", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java deleted file mode 100644 index 96db0183bef9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesGetWithResponseMockTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"folder\":{\"name\":\"r\"},\"description\":\"nvozjudg\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"tmmxvuzofu\"},\"linkedServiceType\":\"abrsfuvajuzhid\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]}},{\"sourceEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"dxhfu\"},\"linkedServiceType\":\"x\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{}],\"connection\":{\"linkedService\":{\"referenceName\":\"zihrx\"},\"linkedServiceType\":\"ub\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{},{}]},\"dataMapperMappings\":[{},{}],\"relationships\":[\"datavzpvi\",\"dataqflmalmxvmrd\",\"datafanjk\"]}],\"policy\":{\"mode\":\"rlcjmslpgmebxw\",\"recurrence\":{\"frequency\":\"Second\",\"interval\":236180404}},\"allowVNetOverride\":true,\"status\":\"gakrpmjo\"},\"name\":\"d\",\"type\":\"ij\",\"etag\":\"ibc\",\"\":{\"ctvohywyvstvpksl\":\"datadtf\",\"fnhcklll\":\"datayhabgocqryliv\",\"fvanefwsodnlwon\":\"datagrkvlqqkpxvemj\"},\"id\":\"gqemjdtcx\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ChangeDataCaptureResource response = manager.changeDataCaptures() - .getWithResponse("s", "pr", "kjiiivbvkvo", "nos", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("gqemjdtcx", response.id()); - Assertions.assertEquals("r", response.folder().name()); - Assertions.assertEquals("nvozjudg", response.description()); - 
Assertions.assertEquals("tmmxvuzofu", - response.sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("abrsfuvajuzhid", - response.sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, response.sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("zihrx", - response.targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("ub", response.targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, response.targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("rlcjmslpgmebxw", response.policy().mode()); - Assertions.assertEquals(FrequencyType.SECOND, response.policy().recurrence().frequency()); - Assertions.assertEquals(236180404, response.policy().recurrence().interval()); - Assertions.assertEquals(true, response.allowVNetOverride()); - Assertions.assertEquals("gakrpmjo", response.status()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java deleted file mode 100644 index b11015f9f39a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesListByFactoryMockTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"properties\":{\"folder\":{\"name\":\"mlaile\"},\"description\":\"usiv\",\"sourceConnectionsInfo\":[{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"jwmdtbx\"},\"linkedServiceType\":\"omc\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]}},{\"sourceEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"fwzqaelxd\"},\"linkedServiceType\":\"dfsteouzoglvt\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{},{},{}]}},{\"sourceEntities\":[{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"zrqkgibpeh\"},\"linkedServiceType\":\"ctzcm\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{}]}}],\"targetConnectionsInfo\":[{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"bwsdoaypixry\"},\"linkedServiceType\":\"lbzxyejoxd\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{}]},\"dataMapperMappings\":[{},{},{}],\"relationships\":[\"datanptbpiccriqhiwy\"]},{\"targetEntities\":[{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"fhlmgpar\"},\"linkedServiceType\":\"rgwmge\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"datavdryxouqwt\"]},{\"targetEntities\":[{},{},{},{}],\"connection\":{\"linkedService\":{\"referenceName\":\"awxslstekbbqq\"},\"linkedServiceType\":\"psxycvoexbx\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{},{},{}]},\"dataMapperMappings\":[{}],\"relationships\":[\"databracgmnelozzfwy\"]}],\"policy\":{\"mode\":\"dutfktm\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":1329023766}},\"allowVNetOverride\":false,\"status\":\"trkesdfujfpnwfz\"},\"name\":\"lqmsybvjfnuyoyd\",\"type\":\"fknnlasfbpjyvu\",\"etag\":\"exlp
mbtmc\",\"\":{\"yexamsgfvuffdvuk\":\"databp\",\"wxc\":\"dataykmdxnrgmjpcke\"},\"id\":\"ualb\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.changeDataCaptures().listByFactory("stybom", "yjfjsseem", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("ualb", response.iterator().next().id()); - Assertions.assertEquals("mlaile", response.iterator().next().folder().name()); - Assertions.assertEquals("usiv", response.iterator().next().description()); - Assertions.assertEquals("jwmdtbx", - response.iterator().next().sourceConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("omc", - response.iterator().next().sourceConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.iterator().next().sourceConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(true, - response.iterator().next().sourceConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("bwsdoaypixry", - response.iterator().next().targetConnectionsInfo().get(0).connection().linkedService().referenceName()); - Assertions.assertEquals("lbzxyejoxd", - response.iterator().next().targetConnectionsInfo().get(0).connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - response.iterator().next().targetConnectionsInfo().get(0).connection().type()); - Assertions.assertEquals(false, - response.iterator().next().targetConnectionsInfo().get(0).connection().isInlineDataset()); - Assertions.assertEquals("dutfktm", 
response.iterator().next().policy().mode()); - Assertions.assertEquals(FrequencyType.HOUR, response.iterator().next().policy().recurrence().frequency()); - Assertions.assertEquals(1329023766, response.iterator().next().policy().recurrence().interval()); - Assertions.assertEquals(false, response.iterator().next().allowVNetOverride()); - Assertions.assertEquals("trkesdfujfpnwfz", response.iterator().next().status()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java deleted file mode 100644 index 3310980c88d9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStartWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesStartWithResponseMockTests { - @Test - public void testStartWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.changeDataCaptures() - .startWithResponse("mwbshqpjueo", "htltooikzouv", "krejuuii", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java deleted file mode 100644 index db91333f36bd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStatusWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesStatusWithResponseMockTests { - @Test - public void testStatusWithResponse() throws Exception { - String responseStr = "\"xbiox\""; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - String response = manager.changeDataCaptures() - .statusWithResponse("xbis", "yitjov", "rirgsqsoac", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("xbiox", response); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java deleted file mode 100644 index 9196fb6ba596..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ChangeDataCapturesStopWithResponseMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ChangeDataCapturesStopWithResponseMockTests { - @Test - public void testStopWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.changeDataCaptures().stopWithResponse("rbtfarb", "arxyh", "ukc", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java deleted file mode 100644 index 5bddc381a80c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CmkIdentityDefinitionTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CmkIdentityDefinition; -import org.junit.jupiter.api.Assertions; - -public final class CmkIdentityDefinitionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CmkIdentityDefinition model - = BinaryData.fromString("{\"userAssignedIdentity\":\"lexxbczwtru\"}").toObject(CmkIdentityDefinition.class); - Assertions.assertEquals("lexxbczwtru", model.userAssignedIdentity()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CmkIdentityDefinition model = new CmkIdentityDefinition().withUserAssignedIdentity("lexxbczwtru"); - model = BinaryData.fromObject(model).toObject(CmkIdentityDefinition.class); - Assertions.assertEquals("lexxbczwtru", model.userAssignedIdentity()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java deleted file mode 100644 index 0170619e0f7d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsEntityDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CommonDataServiceForAppsEntityDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CommonDataServiceForAppsEntityDataset model = BinaryData.fromString( - "{\"type\":\"CommonDataServiceForAppsEntity\",\"typeProperties\":{\"entityName\":\"dataweuiyx\"},\"description\":\"kzts\",\"structure\":\"datafbevyllz\",\"schema\":\"datahkqytkzta\",\"linkedServiceName\":{\"referenceName\":\"opgfzdgjfcy\",\"parameters\":{\"yhigqkzjuqwqaj\":\"datavlo\",\"xhyoip\":\"datauzxp\",\"bgsosc\":\"dataf\"}},\"parameters\":{\"ekwwnthropmdudsy\":{\"type\":\"SecureString\",\"defaultValue\":\"datafvbennmfkbpjnr\"},\"youergaghp\":{\"type\":\"Float\",\"defaultValue\":\"dataztvktjhffecqko\"},\"yedzfzq\":{\"type\":\"String\",\"defaultValue\":\"datakpyehhfdyldh\"},\"jlwyxedzn\":{\"type\":\"Int\",\"defaultValue\":\"dataqhtdereunokakzwh\"}},\"annotations\":[\"datafomckewv\"],\"folder\":{\"name\":\"fopxf\"},\"\":{\"pt\":\"datapdyzoutx\",\"dgaaqwvkgjpy\":\"datafhgnuywezygv\",\"nogehlufbort\":\"datapmpv\",\"xyji\":\"datanukkfaxzsvb\"}}") - .toObject(CommonDataServiceForAppsEntityDataset.class); - Assertions.assertEquals("kzts", model.description()); - Assertions.assertEquals("opgfzdgjfcy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ekwwnthropmdudsy").type()); - 
Assertions.assertEquals("fopxf", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CommonDataServiceForAppsEntityDataset model - = new CommonDataServiceForAppsEntityDataset().withDescription("kzts") - .withStructure("datafbevyllz") - .withSchema("datahkqytkzta") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("opgfzdgjfcy") - .withParameters(mapOf("yhigqkzjuqwqaj", "datavlo", "xhyoip", "datauzxp", "bgsosc", "dataf"))) - .withParameters(mapOf("ekwwnthropmdudsy", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datafvbennmfkbpjnr"), - "youergaghp", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataztvktjhffecqko"), - "yedzfzq", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datakpyehhfdyldh"), - "jlwyxedzn", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataqhtdereunokakzwh"))) - .withAnnotations(Arrays.asList("datafomckewv")) - .withFolder(new DatasetFolder().withName("fopxf")) - .withEntityName("dataweuiyx"); - model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsEntityDataset.class); - Assertions.assertEquals("kzts", model.description()); - Assertions.assertEquals("opgfzdgjfcy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ekwwnthropmdudsy").type()); - Assertions.assertEquals("fopxf", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java deleted file mode 100644 index 92a746d10cad..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsEntityDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CommonDataServiceForAppsEntityDatasetTypeProperties; - -public final class CommonDataServiceForAppsEntityDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CommonDataServiceForAppsEntityDatasetTypeProperties model - = BinaryData.fromString("{\"entityName\":\"datakobqoclflioe\"}") - .toObject(CommonDataServiceForAppsEntityDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CommonDataServiceForAppsEntityDatasetTypeProperties model - = new CommonDataServiceForAppsEntityDatasetTypeProperties().withEntityName("datakobqoclflioe"); - model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsEntityDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java deleted file mode 100644 index 5f6b879677ee..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CommonDataServiceForAppsSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CommonDataServiceForAppsSource; - -public final class CommonDataServiceForAppsSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CommonDataServiceForAppsSource model = BinaryData.fromString( - "{\"type\":\"CommonDataServiceForAppsSource\",\"query\":\"datahdjbyfdfuaj\",\"additionalColumns\":\"datap\",\"sourceRetryCount\":\"dataleku\",\"sourceRetryWait\":\"datawvjvzznyjqbw\",\"maxConcurrentConnections\":\"datawjvfisloqut\",\"disableMetricsCollection\":\"databzwgjupjbd\",\"\":{\"r\":\"datakiajqsshupxjttn\",\"om\":\"datamerqzapu\"}}") - .toObject(CommonDataServiceForAppsSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CommonDataServiceForAppsSource model = new CommonDataServiceForAppsSource().withSourceRetryCount("dataleku") - .withSourceRetryWait("datawvjvzznyjqbw") - .withMaxConcurrentConnections("datawjvfisloqut") - .withDisableMetricsCollection("databzwgjupjbd") - .withQuery("datahdjbyfdfuaj") - .withAdditionalColumns("datap"); - model = BinaryData.fromObject(model).toObject(CommonDataServiceForAppsSource.class); 
- } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java deleted file mode 100644 index 4b2977039d99..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CompressionReadSettingsTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class CompressionReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CompressionReadSettings model = BinaryData.fromString( - "{\"type\":\"CompressionReadSettings\",\"\":{\"ta\":\"databpnhjoclvfzwtkrm\",\"npixhulfjl\":\"datayqeesg\"}}") - .toObject(CompressionReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CompressionReadSettings model - = new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")); - model = BinaryData.fromObject(model).toObject(CompressionReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java deleted file mode 100644 index 7ab383be2f1d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurObjectDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConcurObjectDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ConcurObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConcurObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"ConcurObject\",\"typeProperties\":{\"tableName\":\"dataxamqecjrzvlcivqx\"},\"description\":\"mklphxwww\",\"structure\":\"datajkbgnfbr\",\"schema\":\"datavfsunhaevla\",\"linkedServiceName\":{\"referenceName\":\"xczywywu\",\"parameters\":{\"rfgimomggewdqbxe\":\"datacorewcnnaaxqjfda\",\"sfx\":\"datafyznvussuqksl\",\"wpmohnrtlikffyd\":\"datayzqbye\",\"fwvzdteqjm\":\"datatkqrfbgyn\"}},\"parameters\":{\"jyoxxjxb\":{\"type\":\"Array\",\"defaultValue\":\"datagkaxnypr\"},\"emqom\":{\"type\":\"Int\",\"defaultValue\":\"datarrlccklyfpjmspa\"},\"hcaptkhjx\":{\"type\":\"Int\",\"defaultValue\":\"datalknuyapvibzicyvi\"}},\"annotations\":[\"databnvfccklzhznfgv\"],\"folder\":{\"name\":\"xmnctigpksywi\"},\"\":{\"efuhb\":\"dataktgkdprtqjytdc\",\"caytnpkvbpbltcws\":\"datawbvjsbgmlamoa\"}}") - .toObject(ConcurObjectDataset.class); - Assertions.assertEquals("mklphxwww", model.description()); - Assertions.assertEquals("xczywywu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jyoxxjxb").type()); - Assertions.assertEquals("xmnctigpksywi", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConcurObjectDataset model = new ConcurObjectDataset().withDescription("mklphxwww") - .withStructure("datajkbgnfbr") - .withSchema("datavfsunhaevla") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xczywywu") - .withParameters(mapOf("rfgimomggewdqbxe", "datacorewcnnaaxqjfda", "sfx", "datafyznvussuqksl", - "wpmohnrtlikffyd", "datayzqbye", "fwvzdteqjm", "datatkqrfbgyn"))) - .withParameters(mapOf("jyoxxjxb", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagkaxnypr"), "emqom", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datarrlccklyfpjmspa"), - "hcaptkhjx", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalknuyapvibzicyvi"))) - 
.withAnnotations(Arrays.asList("databnvfccklzhznfgv")) - .withFolder(new DatasetFolder().withName("xmnctigpksywi")) - .withTableName("dataxamqecjrzvlcivqx"); - model = BinaryData.fromObject(model).toObject(ConcurObjectDataset.class); - Assertions.assertEquals("mklphxwww", model.description()); - Assertions.assertEquals("xczywywu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jyoxxjxb").type()); - Assertions.assertEquals("xmnctigpksywi", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java deleted file mode 100644 index 73d4ffa1a387..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConcurSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConcurSource; - -public final class ConcurSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConcurSource model = BinaryData.fromString( - "{\"type\":\"ConcurSource\",\"query\":\"datakrjolbaegha\",\"queryTimeout\":\"datascismr\",\"additionalColumns\":\"dataeklf\",\"sourceRetryCount\":\"datanys\",\"sourceRetryWait\":\"datalxt\",\"maxConcurrentConnections\":\"dataodwqzbiuk\",\"disableMetricsCollection\":\"datafyfvyzaofaiwlnfv\",\"\":{\"jluaywgcjqnfa\":\"datauuqafolseyxpgkml\",\"slwve\":\"datapyglnfw\",\"ekst\":\"datablucpmqwkfgmkp\",\"ajfers\":\"dataqzhdwr\"}}") - .toObject(ConcurSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConcurSource model = new ConcurSource().withSourceRetryCount("datanys") - .withSourceRetryWait("datalxt") - .withMaxConcurrentConnections("dataodwqzbiuk") - .withDisableMetricsCollection("datafyfvyzaofaiwlnfv") - .withQueryTimeout("datascismr") - .withAdditionalColumns("dataeklf") - .withQuery("datakrjolbaegha"); - model = BinaryData.fromObject(model).toObject(ConcurSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java deleted file mode 100644 index 0efc656103cd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ConnectionStatePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; - -public final class ConnectionStatePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConnectionStateProperties model = BinaryData.fromString( - "{\"actionsRequired\":\"ppdbwnupgahxkum\",\"description\":\"jcaacfdmmcpugm\",\"status\":\"qepvufhbzeh\"}") - .toObject(ConnectionStateProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConnectionStateProperties model = new ConnectionStateProperties(); - model = BinaryData.fromObject(model).toObject(ConnectionStateProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java deleted file mode 100644 index ce72f5c6eec1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ControlActivityTests.java +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.ControlActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ControlActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ControlActivity model = BinaryData.fromString( - "{\"type\":\"Container\",\"name\":\"qvz\",\"description\":\"kxufuw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"kuiveftugiwsvlf\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"rfufzaysfzuvuo\":\"datargnx\"}},{\"activity\":\"zyfqcjclvbqo\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"vfcubxlmqh\":\"dataphnazp\",\"hzyfuupqkr\":\"datadbqrlb\",\"j\":\"datavdjpcevbkkgq\"}},{\"activity\":\"gphhpwxghwwfqzw\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Completed\"],\"\":{\"jsqepxft\":\"datad\",\"pjs\":\"dataifadsliifdrbs\"}}],\"userProperties\":[{\"name\":\"bsnxmfooi\",\"value\":\"datapn\"}],\"\":{\"uxktsxnikcgitt\":\"dataqgcuwgkmqcbrewt\",\"sjeb\":\"datamlpihtep\"}}") - .toObject(ControlActivity.class); - Assertions.assertEquals("qvz", model.name()); - Assertions.assertEquals("kxufuw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("kuiveftugiwsvlf", model.dependsOn().get(0).activity()); - 
Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bsnxmfooi", model.userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ControlActivity model = new ControlActivity().withName("qvz") - .withDescription("kxufuw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("kuiveftugiwsvlf") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zyfqcjclvbqo") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gphhpwxghwwfqzw") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("bsnxmfooi").withValue("datapn"))); - model = BinaryData.fromObject(model).toObject(ControlActivity.class); - Assertions.assertEquals("qvz", model.name()); - Assertions.assertEquals("kxufuw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("kuiveftugiwsvlf", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bsnxmfooi", model.userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java deleted file mode 100644 index c9f9ed6ad7e7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityLogSettingsTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopyActivityLogSettings; - -public final class CopyActivityLogSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopyActivityLogSettings model - = BinaryData.fromString("{\"logLevel\":\"dataopyq\",\"enableReliableLogging\":\"datapbxspvkcng\"}") - .toObject(CopyActivityLogSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopyActivityLogSettings model - = new CopyActivityLogSettings().withLogLevel("dataopyq").withEnableReliableLogging("datapbxspvkcng"); - model = BinaryData.fromObject(model).toObject(CopyActivityLogSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java deleted 
file mode 100644 index 513f45cf7a29..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTests.java +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.CopyActivity; -import com.azure.resourcemanager.datafactory.models.CopyActivityLogSettings; -import com.azure.resourcemanager.datafactory.models.CopySink; -import com.azure.resourcemanager.datafactory.models.CopySource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.LogSettings; -import com.azure.resourcemanager.datafactory.models.LogStorageSettings; -import com.azure.resourcemanager.datafactory.models.RedirectIncompatibleRowSettings; -import com.azure.resourcemanager.datafactory.models.SkipErrorFile; -import com.azure.resourcemanager.datafactory.models.StagingSettings; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CopyActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() 
throws Exception { - CopyActivity model = BinaryData.fromString( - "{\"type\":\"Copy\",\"typeProperties\":{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datagxnhfcccdu\",\"sourceRetryWait\":\"datayb\",\"maxConcurrentConnections\":\"databhmpmeglolpotclm\",\"disableMetricsCollection\":\"datapqjryty\",\"\":{\"govvivlrxytraf\":\"dataojjhtnnthj\"}},\"sink\":{\"type\":\"CopySink\",\"writeBatchSize\":\"datatwyt\",\"writeBatchTimeout\":\"datapiilgyqluolgspyq\",\"sinkRetryCount\":\"datapnhhv\",\"sinkRetryWait\":\"dataukourqviyf\",\"maxConcurrentConnections\":\"dataegwezgfqo\",\"disableMetricsCollection\":\"datatcp\",\"\":{\"y\":\"dataz\",\"jckakikkkajmnvb\":\"datagzba\",\"yco\":\"datagmnkrq\"}},\"translator\":\"datagkxxpkl\",\"enableStaging\":\"datavbcgs\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"daypx\",\"parameters\":{\"kigmjnktt\":\"datadf\",\"tzfliqntnoe\":\"datazyvzixmusiidivbb\",\"wdaiexisapygii\":\"dataxoqpuclidy\"}},\"path\":\"datakaffzz\",\"enableCompression\":\"dataivfiypfvwyzjsi\",\"\":{\"grxmptu\":\"datav\",\"bpqghxdp\":\"datade\",\"udbiacuqouc\":\"dataihfimlyxdmix\",\"pnzijpyyvecruhqy\":\"datafuvuslvbuj\"}},\"parallelCopies\":\"datadsthktsaljkhlpg\",\"dataIntegrationUnits\":\"datag\",\"enableSkipIncompatibleRow\":\"datako\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"dataxw\",\"path\":\"datathivapuaxosw\",\"\":{\"fezlmzsekv\":\"datahlrzlgkcnpdkwer\",\"vofrenuvp\":\"datauzyowra\",\"tfpbxnret\":\"datapltnyyeyj\",\"xtoh\":\"datag\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"qtxi\",\"parameters\":{\"nxb\":\"dataeyzz\",\"lfyf\":\"datavy\",\"jhoxtbsybpefojp\":\"datasb\",\"yifeiiriomjdnkn\":\"dataixdgqjkfvmrnwgea\"}},\"path\":\"datalxrd\",\"logLevel\":\"dataqtzzhkpko\",\"enableReliableLogging\":\"datavfno\",\"\":{\"dtgxsyfuhgmmzx\":\"datahutv\",\"ppjqcwcpdaoskgt\":\"datasrl\",\"mpbgrosx\":\"datalljsoasxjjk\",\"cbnmbbhluvd\":\"datadx\"}},\"logSettings\":{\"enableCopyActivityLog\":\"datauevnom
zlre\",\"copyActivityLogSettings\":{\"logLevel\":\"dataskiegtaenalep\",\"enableReliableLogging\":\"dataasqolxaodb\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"gxbadborq\",\"parameters\":{\"batrvi\":\"datahggvyhqwy\",\"gxifkdsclqwkd\":\"datanls\"}},\"path\":\"dataepuvambzf\"}},\"preserveRules\":[\"datahaqfueqfniagzm\",\"dataxsoxqarjt\",\"datanlllm\",\"dataiyguuhylzbdimtd\"],\"preserve\":[\"dataxfqy\",\"datauwcilxznxhbttkk\",\"datacxjxueilixzjvkqj\"],\"validateDataConsistency\":\"datablhcmxx\",\"skipErrorFile\":{\"fileMissing\":\"datakxclj\",\"dataInconsistency\":\"datamsfsquxxqcimnchv\"}},\"inputs\":[{\"referenceName\":\"rivagcsmrtepsyb\",\"parameters\":{\"sfjdcokb\":\"datafoz\",\"eltnby\":\"databpqelmszobt\",\"u\":\"databgrdrumu\",\"kwv\":\"datawecdsybiazfvx\"}},{\"referenceName\":\"eqly\",\"parameters\":{\"odku\":\"dataqqonkrekiojusm\",\"yykx\":\"dataycntaov\"}}],\"outputs\":[{\"referenceName\":\"kd\",\"parameters\":{\"lhj\":\"dataywrwvs\",\"qygszhpnatltj\":\"datab\"}},{\"referenceName\":\"kqzfwl\",\"parameters\":{\"yyu\":\"datanmgsbubzf\"}}],\"linkedServiceName\":{\"referenceName\":\"cwrtr\",\"parameters\":{\"ghv\":\"datarzsnfgmohh\",\"vwfao\":\"datamvvfpkymqn\",\"fgqoarmtuprqtcxq\":\"dataulboawzplwg\"}},\"policy\":{\"timeout\":\"dataupya\",\"retry\":\"datadejparjvsbozfjb\",\"retryIntervalInSeconds\":1282083345,\"secureInput\":false,\"secureOutput\":true,\"\":{\"bklhwrikr\":\"databwmrdl\"}},\"name\":\"ljbhgzffemry\",\"description\":\"ao\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"cqu\",\"dependencyConditions\":[\"Failed\"],\"\":{\"fixrukwxcaagzlqo\":\"dataliibxolzinxx\",\"fiyrywf\":\"datagzgsgzlbunmjha\"}}],\"userProperties\":[{\"name\":\"rreebjmslbxf\",\"value\":\"dataiiarlldy\"}],\"\":{\"uebrvrhwqkfff\":\"datadtykhsafrfv\",\"ei\":\"datagbk\",\"sbebvkmtljzilk\":\"dataybwh\",\"pitzqrmxcuk\":\"datavybljqgi\"}}") - .toObject(CopyActivity.class); - Assertions.assertEquals("ljbhgzffemry", 
model.name()); - Assertions.assertEquals("ao", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("cqu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("rreebjmslbxf", model.userProperties().get(0).name()); - Assertions.assertEquals("cwrtr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1282083345, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("rivagcsmrtepsyb", model.inputs().get(0).referenceName()); - Assertions.assertEquals("kd", model.outputs().get(0).referenceName()); - Assertions.assertEquals("daypx", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("qtxi", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("gxbadborq", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopyActivity model = new CopyActivity().withName("ljbhgzffemry") - .withDescription("ao") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("cqu") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("rreebjmslbxf").withValue("dataiiarlldy"))) - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("cwrtr") - .withParameters(mapOf("ghv", "datarzsnfgmohh", "vwfao", "datamvvfpkymqn", "fgqoarmtuprqtcxq", - 
"dataulboawzplwg"))) - .withPolicy(new ActivityPolicy().withTimeout("dataupya") - .withRetry("datadejparjvsbozfjb") - .withRetryIntervalInSeconds(1282083345) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withInputs(Arrays.asList( - new DatasetReference().withReferenceName("rivagcsmrtepsyb") - .withParameters(mapOf("sfjdcokb", "datafoz", "eltnby", "databpqelmszobt", "u", "databgrdrumu", - "kwv", "datawecdsybiazfvx")), - new DatasetReference().withReferenceName("eqly") - .withParameters(mapOf("odku", "dataqqonkrekiojusm", "yykx", "dataycntaov")))) - .withOutputs(Arrays.asList( - new DatasetReference().withReferenceName("kd") - .withParameters(mapOf("lhj", "dataywrwvs", "qygszhpnatltj", "datab")), - new DatasetReference().withReferenceName("kqzfwl").withParameters(mapOf("yyu", "datanmgsbubzf")))) - .withSource(new CopySource().withSourceRetryCount("datagxnhfcccdu") - .withSourceRetryWait("datayb") - .withMaxConcurrentConnections("databhmpmeglolpotclm") - .withDisableMetricsCollection("datapqjryty") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withSink(new CopySink().withWriteBatchSize("datatwyt") - .withWriteBatchTimeout("datapiilgyqluolgspyq") - .withSinkRetryCount("datapnhhv") - .withSinkRetryWait("dataukourqviyf") - .withMaxConcurrentConnections("dataegwezgfqo") - .withDisableMetricsCollection("datatcp") - .withAdditionalProperties(mapOf("type", "CopySink"))) - .withTranslator("datagkxxpkl") - .withEnableStaging("datavbcgs") - .withStagingSettings(new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("daypx") - .withParameters(mapOf("kigmjnktt", "datadf", "tzfliqntnoe", "datazyvzixmusiidivbb", - "wdaiexisapygii", "dataxoqpuclidy"))) - .withPath("datakaffzz") - .withEnableCompression("dataivfiypfvwyzjsi") - .withAdditionalProperties(mapOf())) - .withParallelCopies("datadsthktsaljkhlpg") - .withDataIntegrationUnits("datag") - .withEnableSkipIncompatibleRow("datako") 
- .withRedirectIncompatibleRowSettings(new RedirectIncompatibleRowSettings().withLinkedServiceName("dataxw") - .withPath("datathivapuaxosw") - .withAdditionalProperties(mapOf())) - .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qtxi") - .withParameters(mapOf("nxb", "dataeyzz", "lfyf", "datavy", "jhoxtbsybpefojp", "datasb", - "yifeiiriomjdnkn", "dataixdgqjkfvmrnwgea"))) - .withPath("datalxrd") - .withLogLevel("dataqtzzhkpko") - .withEnableReliableLogging("datavfno") - .withAdditionalProperties(mapOf())) - .withLogSettings(new LogSettings().withEnableCopyActivityLog("datauevnomzlre") - .withCopyActivityLogSettings(new CopyActivityLogSettings().withLogLevel("dataskiegtaenalep") - .withEnableReliableLogging("dataasqolxaodb")) - .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gxbadborq") - .withParameters(mapOf("batrvi", "datahggvyhqwy", "gxifkdsclqwkd", "datanls"))) - .withPath("dataepuvambzf"))) - .withPreserveRules(Arrays.asList("datahaqfueqfniagzm", "dataxsoxqarjt", "datanlllm", "dataiyguuhylzbdimtd")) - .withPreserve(Arrays.asList("dataxfqy", "datauwcilxznxhbttkk", "datacxjxueilixzjvkqj")) - .withValidateDataConsistency("datablhcmxx") - .withSkipErrorFile( - new SkipErrorFile().withFileMissing("datakxclj").withDataInconsistency("datamsfsquxxqcimnchv")); - model = BinaryData.fromObject(model).toObject(CopyActivity.class); - Assertions.assertEquals("ljbhgzffemry", model.name()); - Assertions.assertEquals("ao", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("cqu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("rreebjmslbxf", 
model.userProperties().get(0).name()); - Assertions.assertEquals("cwrtr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1282083345, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("rivagcsmrtepsyb", model.inputs().get(0).referenceName()); - Assertions.assertEquals("kd", model.outputs().get(0).referenceName()); - Assertions.assertEquals("daypx", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("qtxi", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("gxbadborq", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java deleted file mode 100644 index 867988d59809..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyActivityTypePropertiesTests.java +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CopyActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.CopyActivityLogSettings; -import com.azure.resourcemanager.datafactory.models.CopySink; -import com.azure.resourcemanager.datafactory.models.CopySource; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.LogSettings; -import com.azure.resourcemanager.datafactory.models.LogStorageSettings; -import com.azure.resourcemanager.datafactory.models.RedirectIncompatibleRowSettings; -import com.azure.resourcemanager.datafactory.models.SkipErrorFile; -import com.azure.resourcemanager.datafactory.models.StagingSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CopyActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopyActivityTypeProperties model = BinaryData.fromString( - 
"{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datakggqxqanrkhcdjfs\",\"sourceRetryWait\":\"databjcn\",\"maxConcurrentConnections\":\"datawrbrntvhpp\",\"disableMetricsCollection\":\"datarlzual\",\"\":{\"abn\":\"datapolatorjm\",\"ig\":\"datauxlthyxryvwnzh\",\"bffcvtij\":\"datadgbcnqvbn\",\"gvgogczgcm\":\"datalemzrw\"}},\"sink\":{\"type\":\"CopySink\",\"writeBatchSize\":\"datamkwddgyqe\",\"writeBatchTimeout\":\"datacr\",\"sinkRetryCount\":\"dataamlrtcbvif\",\"sinkRetryWait\":\"datanxstowvgw\",\"maxConcurrentConnections\":\"datavsqlyahlaoqkci\",\"disableMetricsCollection\":\"databuzvaxl\",\"\":{\"rqzpfpbxl\":\"datanwhictsauv\",\"ldonsekazxewnlpc\":\"dataddkkoyzsyjvk\",\"xmyfrmfclkyncjya\":\"datahczqm\",\"duabqbverbjcts\":\"datazzcbohbbavode\"}},\"translator\":\"datavhxnjo\",\"enableStaging\":\"datapc\",\"stagingSettings\":{\"linkedServiceName\":{\"referenceName\":\"dlppuk\",\"parameters\":{\"efivozrdzrik\":\"datanpclnmjkydh\"}},\"path\":\"dataucvvrkxpbjg\",\"enableCompression\":\"dataoelamerpbctrwrv\",\"\":{\"fajlgxrsn\":\"datamacbrywqqezt\",\"gywkin\":\"datatrooaahhvs\"}},\"parallelCopies\":\"datavtx\",\"dataIntegrationUnits\":\"datameb\",\"enableSkipIncompatibleRow\":\"datain\",\"redirectIncompatibleRowSettings\":{\"linkedServiceName\":\"datadk\",\"path\":\"dataqjj\",\"\":{\"tvcjd\":\"dataoxkcttpc\"}},\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"k\",\"parameters\":{\"gosrbullqnfzsegu\":\"datahvcrjqzbmyftzbx\",\"tzdvx\":\"datasbzmixwaxtnk\",\"ovcbdsr\":\"datagdaajlhgsuqmrky\",\"ingadkrkny\":\"datahpqlxnbdjt\"}},\"path\":\"datangdfzqcjfqmy\",\"logLevel\":\"datawbuxqzfwgbqsvexz\",\"enableReliableLogging\":\"datafwiav\",\"\":{\"cbxrskylq\":\"datatgxdlznfo\",\"teikktret\":\"datapp\",\"nvb\":\"datatsygzjplaxxfnrlt\"}},\"logSettings\":{\"enableCopyActivityLog\":\"datat\",\"copyActivityLogSettings\":{\"logLevel\":\"datakrrp\",\"enableReliableLogging\":\"datadoli\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"izsglav
dtttyd\",\"parameters\":{\"ypwmveyrcikedmo\":\"datamzrqkjqcsh\"}},\"path\":\"datajuqowuicvjyj\"}},\"preserveRules\":[\"dataleuqxhmr\",\"datalwkcgu\",\"datavpvta\",\"datalxxzn\"],\"preserve\":[\"dataqqzikvgwqiwod\",\"dataaslp\"],\"validateDataConsistency\":\"dataexrzxvff\",\"skipErrorFile\":{\"fileMissing\":\"datatdmhr\",\"dataInconsistency\":\"databyul\"}}") - .toObject(CopyActivityTypeProperties.class); - Assertions.assertEquals("dlppuk", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("k", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("izsglavdtttyd", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopyActivityTypeProperties model = new CopyActivityTypeProperties() - .withSource(new CopySource().withSourceRetryCount("datakggqxqanrkhcdjfs") - .withSourceRetryWait("databjcn") - .withMaxConcurrentConnections("datawrbrntvhpp") - .withDisableMetricsCollection("datarlzual") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withSink(new CopySink().withWriteBatchSize("datamkwddgyqe") - .withWriteBatchTimeout("datacr") - .withSinkRetryCount("dataamlrtcbvif") - .withSinkRetryWait("datanxstowvgw") - .withMaxConcurrentConnections("datavsqlyahlaoqkci") - .withDisableMetricsCollection("databuzvaxl") - .withAdditionalProperties(mapOf("type", "CopySink"))) - .withTranslator("datavhxnjo") - .withEnableStaging("datapc") - .withStagingSettings(new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dlppuk") - .withParameters(mapOf("efivozrdzrik", "datanpclnmjkydh"))) - .withPath("dataucvvrkxpbjg") - .withEnableCompression("dataoelamerpbctrwrv") - .withAdditionalProperties(mapOf())) - .withParallelCopies("datavtx") - .withDataIntegrationUnits("datameb") - .withEnableSkipIncompatibleRow("datain") - 
.withRedirectIncompatibleRowSettings(new RedirectIncompatibleRowSettings().withLinkedServiceName("datadk") - .withPath("dataqjj") - .withAdditionalProperties(mapOf())) - .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("k") - .withParameters(mapOf("gosrbullqnfzsegu", "datahvcrjqzbmyftzbx", "tzdvx", "datasbzmixwaxtnk", - "ovcbdsr", "datagdaajlhgsuqmrky", "ingadkrkny", "datahpqlxnbdjt"))) - .withPath("datangdfzqcjfqmy") - .withLogLevel("datawbuxqzfwgbqsvexz") - .withEnableReliableLogging("datafwiav") - .withAdditionalProperties(mapOf())) - .withLogSettings( - new LogSettings().withEnableCopyActivityLog("datat") - .withCopyActivityLogSettings( - new CopyActivityLogSettings().withLogLevel("datakrrp").withEnableReliableLogging("datadoli")) - .withLogLocationSettings( - new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("izsglavdtttyd") - .withParameters(mapOf("ypwmveyrcikedmo", "datamzrqkjqcsh"))) - .withPath("datajuqowuicvjyj"))) - .withPreserveRules(Arrays.asList("dataleuqxhmr", "datalwkcgu", "datavpvta", "datalxxzn")) - .withPreserve(Arrays.asList("dataqqzikvgwqiwod", "dataaslp")) - .withValidateDataConsistency("dataexrzxvff") - .withSkipErrorFile(new SkipErrorFile().withFileMissing("datatdmhr").withDataInconsistency("databyul")); - model = BinaryData.fromObject(model).toObject(CopyActivityTypeProperties.class); - Assertions.assertEquals("dlppuk", model.stagingSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("k", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("izsglavdtttyd", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java deleted file mode 100644 index 13a5cf4bc279..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyComputeScalePropertiesTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopyComputeScaleProperties; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CopyComputeScalePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopyComputeScaleProperties model = BinaryData.fromString( - "{\"dataIntegrationUnit\":32236335,\"timeToLive\":894354282,\"\":{\"z\":\"datanqpvjtshlwvrsks\",\"wfbwoet\":\"datahwtsyp\",\"xhflgdu\":\"dataizrf\"}}") - .toObject(CopyComputeScaleProperties.class); - Assertions.assertEquals(32236335, model.dataIntegrationUnit()); - Assertions.assertEquals(894354282, model.timeToLive()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopyComputeScaleProperties model = new CopyComputeScaleProperties().withDataIntegrationUnit(32236335) - .withTimeToLive(894354282) - .withAdditionalProperties(mapOf()); - model = 
BinaryData.fromObject(model).toObject(CopyComputeScaleProperties.class); - Assertions.assertEquals(32236335, model.dataIntegrationUnit()); - Assertions.assertEquals(894354282, model.timeToLive()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java deleted file mode 100644 index 2826e0387500..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySinkTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopySink; -import java.util.HashMap; -import java.util.Map; - -public final class CopySinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopySink model = BinaryData.fromString( - "{\"type\":\"CopySink\",\"writeBatchSize\":\"datakbdwzvhtgfdygap\",\"writeBatchTimeout\":\"datawmivazfnbzcjms\",\"sinkRetryCount\":\"datarjby\",\"sinkRetryWait\":\"datakcvahvby\",\"maxConcurrentConnections\":\"datat\",\"disableMetricsCollection\":\"datavwvengicyc\",\"\":{\"vskqxgb\":\"datahgjyholsmahbjc\",\"lklaurly\":\"datagozr\"}}") - .toObject(CopySink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopySink model = new CopySink().withWriteBatchSize("datakbdwzvhtgfdygap") - .withWriteBatchTimeout("datawmivazfnbzcjms") - .withSinkRetryCount("datarjby") - .withSinkRetryWait("datakcvahvby") - .withMaxConcurrentConnections("datat") - .withDisableMetricsCollection("datavwvengicyc") - .withAdditionalProperties(mapOf("type", "CopySink")); - model = BinaryData.fromObject(model).toObject(CopySink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java deleted file mode 100644 index dc9b784dc4e6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopySourceTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopySource; -import java.util.HashMap; -import java.util.Map; - -public final class CopySourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopySource model = BinaryData.fromString( - "{\"type\":\"CopySource\",\"sourceRetryCount\":\"dataqyirupsuyq\",\"sourceRetryWait\":\"dataxnavxzpyaptex\",\"maxConcurrentConnections\":\"datalqhewhcchexc\",\"disableMetricsCollection\":\"datamy\",\"\":{\"wolfmfazxwcaic\":\"dataggmit\",\"liikkosqpl\":\"datapjttzfswohd\",\"uukydi\":\"dataegemtnbke\",\"jfayftohdlp\":\"datasncrzbtlrbzqtuhg\"}}") - .toObject(CopySource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopySource model = new CopySource().withSourceRetryCount("dataqyirupsuyq") - .withSourceRetryWait("dataxnavxzpyaptex") - .withMaxConcurrentConnections("datalqhewhcchexc") - .withDisableMetricsCollection("datamy") - .withAdditionalProperties(mapOf("type", "CopySource")); - model = 
BinaryData.fromObject(model).toObject(CopySource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java deleted file mode 100644 index 03eedc9b1d3f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CopyTranslatorTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopyTranslator; -import java.util.HashMap; -import java.util.Map; - -public final class CopyTranslatorTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CopyTranslator model = BinaryData.fromString( - "{\"type\":\"CopyTranslator\",\"\":{\"jjwggpcdugwddobp\":\"datary\",\"exedzmmcgqifhyhz\":\"datasj\",\"orvhthxcrwe\":\"datagwkqnmhfml\",\"crcelsnjftnfdcj\":\"dataqkdmpf\"}}") - .toObject(CopyTranslator.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CopyTranslator model = new CopyTranslator().withAdditionalProperties(mapOf("type", "CopyTranslator")); - model = BinaryData.fromObject(model).toObject(CopyTranslator.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java deleted file mode 100644 index 42870b5283a7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiCollectionDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CosmosDbMongoDbApiCollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiCollectionDataset model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApiCollection\",\"typeProperties\":{\"collection\":\"datalhqvbk\"},\"description\":\"bpyhssrlvkpkp\",\"structure\":\"datacmaccebx\",\"schema\":\"datapyicy\",\"linkedServiceName\":{\"referenceName\":\"sp\",\"parameters\":{\"ccpumddhgajkr\":\"datahwyykgv\",\"fcudvafnbfbqv\":\"datayddt\",\"ecwzvcmbpwdluda\":\"datanqnxhgkordwzej\",\"ffbvtzldzchub\":\"dataprldidwm\"}},\"parameters\":{\"hfrbzakpjt\":{\"type\":\"Object\",\"defaultValue\":\"datazuvigvl\"},\"qxynqj\":{\"type\":\"Array\",\"defaultValue\":\"dataaqpojpsucmximc\"}},\"annotations\":[\"datatkyvscbgn\",\"datac\",\"datausxhircpg\",\"datavsvkkjbjolpyo\"],\"folder\":{\"name\":\"vuznadvhm\"},\"\":{\"owxxbh\":\"dataoi\",\"ksikawanvmwdv\":\"datapsyioqemqwtqszzg\",\"mpnbnfgyweoj\":\"datajqcrbk\",\"yawkch\":\"dataepgcmahiwf\"}}") - .toObject(CosmosDbMongoDbApiCollectionDataset.class); - Assertions.assertEquals("bpyhssrlvkpkp", model.description()); - Assertions.assertEquals("sp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hfrbzakpjt").type()); - Assertions.assertEquals("vuznadvhm", model.folder().name()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiCollectionDataset model = new CosmosDbMongoDbApiCollectionDataset() - .withDescription("bpyhssrlvkpkp") - .withStructure("datacmaccebx") - .withSchema("datapyicy") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("sp") - .withParameters(mapOf("ccpumddhgajkr", "datahwyykgv", "fcudvafnbfbqv", "datayddt", "ecwzvcmbpwdluda", - "datanqnxhgkordwzej", "ffbvtzldzchub", "dataprldidwm"))) - .withParameters(mapOf("hfrbzakpjt", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazuvigvl"), "qxynqj", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataaqpojpsucmximc"))) - .withAnnotations(Arrays.asList("datatkyvscbgn", "datac", "datausxhircpg", "datavsvkkjbjolpyo")) - .withFolder(new DatasetFolder().withName("vuznadvhm")) - .withCollection("datalhqvbk"); - model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiCollectionDataset.class); - Assertions.assertEquals("bpyhssrlvkpkp", model.description()); - Assertions.assertEquals("sp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("hfrbzakpjt").type()); - Assertions.assertEquals("vuznadvhm", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 20d91d196b4d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiCollectionDatasetTypeProperties; - -public final class CosmosDbMongoDbApiCollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collection\":\"dataapitskshfyftt\"}") - .toObject(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiCollectionDatasetTypeProperties model - = new CosmosDbMongoDbApiCollectionDatasetTypeProperties().withCollection("dataapitskshfyftt"); - model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiCollectionDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java deleted file mode 100644 index 48d348d0bc24..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiLinkedService; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CosmosDbMongoDbApiLinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiLinkedService model = BinaryData.fromString( - 
"{\"type\":\"CosmosDbMongoDbApi\",\"typeProperties\":{\"isServerVersionAbove32\":\"datamewwlkryz\",\"connectionString\":\"datapgqqdhtctx\",\"database\":\"dataregykjmpad\"},\"connectVia\":{\"referenceName\":\"joh\",\"parameters\":{\"dljgrpqu\":\"datazhdxit\",\"qqb\":\"datafxg\"}},\"description\":\"tffxdbisihumw\",\"parameters\":{\"zqqihvb\":{\"type\":\"Array\",\"defaultValue\":\"datatgljop\"},\"kjpajl\":{\"type\":\"Int\",\"defaultValue\":\"dataqiqddjynpgomz\"},\"bnwitafjjevp\":{\"type\":\"Object\",\"defaultValue\":\"datavrljlhejcccp\"},\"arskp\":{\"type\":\"Bool\",\"defaultValue\":\"datatghzqwvk\"}},\"annotations\":[\"datatqc\"],\"\":{\"fwrm\":\"databsudcykgulel\",\"oubxlpkdsnbqoyms\":\"datauxyqbm\",\"zyrp\":\"datalraduhgwa\"}}") - .toObject(CosmosDbMongoDbApiLinkedService.class); - Assertions.assertEquals("joh", model.connectVia().referenceName()); - Assertions.assertEquals("tffxdbisihumw", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("zqqihvb").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiLinkedService model = new CosmosDbMongoDbApiLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("joh") - .withParameters(mapOf("dljgrpqu", "datazhdxit", "qqb", "datafxg"))) - .withDescription("tffxdbisihumw") - .withParameters(mapOf("zqqihvb", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datatgljop"), "kjpajl", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataqiqddjynpgomz"), - "bnwitafjjevp", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavrljlhejcccp"), - "arskp", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datatghzqwvk"))) - .withAnnotations(Arrays.asList("datatqc")) - .withIsServerVersionAbove32("datamewwlkryz") - .withConnectionString("datapgqqdhtctx") - .withDatabase("dataregykjmpad"); - model 
= BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiLinkedService.class); - Assertions.assertEquals("joh", model.connectVia().referenceName()); - Assertions.assertEquals("tffxdbisihumw", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("zqqihvb").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java deleted file mode 100644 index 2037320bf449..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiLinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbMongoDbApiLinkedServiceTypeProperties; - -public final class CosmosDbMongoDbApiLinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"isServerVersionAbove32\":\"datay\",\"connectionString\":\"dataijribeskkopbksri\",\"database\":\"datambtmorikcze\"}") - .toObject(CosmosDbMongoDbApiLinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiLinkedServiceTypeProperties model - = new CosmosDbMongoDbApiLinkedServiceTypeProperties().withIsServerVersionAbove32("datay") - .withConnectionString("dataijribeskkopbksri") - .withDatabase("datambtmorikcze"); - model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiLinkedServiceTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java deleted file mode 100644 index 07f65d2ff62c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiSink; - -public final class CosmosDbMongoDbApiSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiSink model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApiSink\",\"writeBehavior\":\"dataylt\",\"writeBatchSize\":\"dataqjfoujeiagnyeefj\",\"writeBatchTimeout\":\"dataayrwyffut\",\"sinkRetryCount\":\"dataxrpxdhzwdy\",\"sinkRetryWait\":\"datayhvx\",\"maxConcurrentConnections\":\"dataexwhoscinpmvcvnm\",\"disableMetricsCollection\":\"datalshglym\",\"\":{\"hiayro\":\"dataazvc\",\"glytwzttkhyrwd\":\"dataxeezlqw\",\"pqatkzghwcywrb\":\"datanpuoaorfpizybpj\",\"kf\":\"dataxwls\"}}") - .toObject(CosmosDbMongoDbApiSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiSink model = new CosmosDbMongoDbApiSink().withWriteBatchSize("dataqjfoujeiagnyeefj") - .withWriteBatchTimeout("dataayrwyffut") - .withSinkRetryCount("dataxrpxdhzwdy") - .withSinkRetryWait("datayhvx") - .withMaxConcurrentConnections("dataexwhoscinpmvcvnm") - .withDisableMetricsCollection("datalshglym") - .withWriteBehavior("dataylt"); - model = BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java deleted file mode 100644 index 511a803b7345..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbMongoDbApiSourceTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbMongoDbApiSource; -import com.azure.resourcemanager.datafactory.models.MongoDbCursorMethodsProperties; -import java.util.HashMap; -import java.util.Map; - -public final class CosmosDbMongoDbApiSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbMongoDbApiSource model = BinaryData.fromString( - "{\"type\":\"CosmosDbMongoDbApiSource\",\"filter\":\"datazdb\",\"cursorMethods\":{\"project\":\"datapphfmzeufjzqaqe\",\"sort\":\"databygqcwz\",\"skip\":\"dataomnqcthgqy\",\"limit\":\"dataoaz\",\"\":{\"pyekslllz\":\"datakkcqafnvjgixsjhi\",\"pnloifx\":\"dataqolckwhgnfbnnh\"}},\"batchSize\":\"dataohfvxavhfhlx\",\"queryTimeout\":\"datawzpba\",\"additionalColumns\":\"datafrfaytcyg\",\"sourceRetryCount\":\"datam\",\"sourceRetryWait\":\"datarmbcklfp\",\"maxConcurrentConnections\":\"datagfvvnkpwl\",\"disableMetricsCollection\":\"datazxdzold\",\"\":{\"regesoozpudalu\":\"datanpnyaterjjuz\"}}") - .toObject(CosmosDbMongoDbApiSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbMongoDbApiSource model = new CosmosDbMongoDbApiSource().withSourceRetryCount("datam") - .withSourceRetryWait("datarmbcklfp") - .withMaxConcurrentConnections("datagfvvnkpwl") - .withDisableMetricsCollection("datazxdzold") - .withFilter("datazdb") - .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("datapphfmzeufjzqaqe") - .withSort("databygqcwz") - .withSkip("dataomnqcthgqy") - .withLimit("dataoaz") - .withAdditionalProperties(mapOf())) - .withBatchSize("dataohfvxavhfhlx") - .withQueryTimeout("datawzpba") - .withAdditionalColumns("datafrfaytcyg"); - model = 
BinaryData.fromObject(model).toObject(CosmosDbMongoDbApiSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java deleted file mode 100644 index cba25b4a3388..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiCollectionDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CosmosDbSqlApiCollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbSqlApiCollectionDataset model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiCollection\",\"typeProperties\":{\"collectionName\":\"datahefr\"},\"description\":\"s\",\"structure\":\"dataerbgpxeb\",\"schema\":\"dataudcaytujrax\",\"linkedServiceName\":{\"referenceName\":\"tpryjmgtn\",\"parameters\":{\"s\":\"dataouxly\"}},\"parameters\":{\"sehq\":{\"type\":\"Object\",\"defaultValue\":\"dataurkep\"},\"erxxxoteehkhowgo\":{\"type\":\"String\",\"defaultValue\":\"datampctwjwdsdlzmk\"},\"c\":{\"type\":\"Int\",\"defaultValue\":\"datahxow\"},\"ylqlocvvujexaygl\":{\"type\":\"Bool\",\"defaultValue\":\"datanpxraqawbmpspf\"}},\"annotations\":[\"datagjnm\"],\"folder\":{\"name\":\"slavxjfiuof\"},\"\":{\"kyhydvikmf\":\"dataidzlvssqywjopa\",\"izwgsoriobijeiyd\":\"datagpmillxgjs\",\"okayrg\":\"dataeuynh\",\"weoftnorw\":\"dataybriop\"}}") - .toObject(CosmosDbSqlApiCollectionDataset.class); - Assertions.assertEquals("s", model.description()); - Assertions.assertEquals("tpryjmgtn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("sehq").type()); - Assertions.assertEquals("slavxjfiuof", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws 
Exception { - CosmosDbSqlApiCollectionDataset model = new CosmosDbSqlApiCollectionDataset().withDescription("s") - .withStructure("dataerbgpxeb") - .withSchema("dataudcaytujrax") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("tpryjmgtn").withParameters(mapOf("s", "dataouxly"))) - .withParameters( - mapOf("sehq", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataurkep"), - "erxxxoteehkhowgo", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datampctwjwdsdlzmk"), - "c", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datahxow"), - "ylqlocvvujexaygl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datanpxraqawbmpspf"))) - .withAnnotations(Arrays.asList("datagjnm")) - .withFolder(new DatasetFolder().withName("slavxjfiuof")) - .withCollectionName("datahefr"); - model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiCollectionDataset.class); - Assertions.assertEquals("s", model.description()); - Assertions.assertEquals("tpryjmgtn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("sehq").type()); - Assertions.assertEquals("slavxjfiuof", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 6842156c707e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiCollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CosmosDbSqlApiCollectionDatasetTypeProperties; - -public final class CosmosDbSqlApiCollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbSqlApiCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collectionName\":\"dataigsioctqkm\"}") - .toObject(CosmosDbSqlApiCollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbSqlApiCollectionDatasetTypeProperties model - = new CosmosDbSqlApiCollectionDatasetTypeProperties().withCollectionName("dataigsioctqkm"); - model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiCollectionDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java deleted file mode 100644 index a6976d782c6c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiSink; - -public final class CosmosDbSqlApiSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbSqlApiSink model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiSink\",\"writeBehavior\":\"datantakr\",\"writeBatchSize\":\"dataku\",\"writeBatchTimeout\":\"datasuwcmzpwkcagfq\",\"sinkRetryCount\":\"dataqgmjjprd\",\"sinkRetryWait\":\"datablonlhtgexwjhicu\",\"maxConcurrentConnections\":\"dataavimxnhylwogtvl\",\"disableMetricsCollection\":\"datagd\",\"\":{\"k\":\"datanhdxlfntdc\"}}") - .toObject(CosmosDbSqlApiSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbSqlApiSink model = new CosmosDbSqlApiSink().withWriteBatchSize("dataku") - .withWriteBatchTimeout("datasuwcmzpwkcagfq") - .withSinkRetryCount("dataqgmjjprd") - .withSinkRetryWait("datablonlhtgexwjhicu") - .withMaxConcurrentConnections("dataavimxnhylwogtvl") - .withDisableMetricsCollection("datagd") - .withWriteBehavior("datantakr"); - model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiSink.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java deleted file mode 100644 index 493469782a28..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CosmosDbSqlApiSourceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CosmosDbSqlApiSource; - -public final class CosmosDbSqlApiSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CosmosDbSqlApiSource model = BinaryData.fromString( - "{\"type\":\"CosmosDbSqlApiSource\",\"query\":\"datasjssreojsgko\",\"pageSize\":\"datanpgkxyr\",\"preferredRegions\":\"datap\",\"detectDatetime\":\"datahyekggo\",\"additionalColumns\":\"datalqvuwsqmwqsg\",\"sourceRetryCount\":\"dataz\",\"sourceRetryWait\":\"datastngxvrpkiz\",\"maxConcurrentConnections\":\"datakgdsursumbcirkbk\",\"disableMetricsCollection\":\"datasvo\",\"\":{\"mg\":\"dataxumuuyblolruf\"}}") - .toObject(CosmosDbSqlApiSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CosmosDbSqlApiSource model = new CosmosDbSqlApiSource().withSourceRetryCount("dataz") - .withSourceRetryWait("datastngxvrpkiz") - .withMaxConcurrentConnections("datakgdsursumbcirkbk") - .withDisableMetricsCollection("datasvo") - .withQuery("datasjssreojsgko") - .withPageSize("datanpgkxyr") - .withPreferredRegions("datap") - .withDetectDatetime("datahyekggo") - .withAdditionalColumns("datalqvuwsqmwqsg"); - 
model = BinaryData.fromObject(model).toObject(CosmosDbSqlApiSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java deleted file mode 100644 index d6be3436aa09..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CouchbaseSource; - -public final class CouchbaseSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CouchbaseSource model = BinaryData.fromString( - "{\"type\":\"CouchbaseSource\",\"query\":\"dataxlkcw\",\"queryTimeout\":\"dataejssksgxykdepqcy\",\"additionalColumns\":\"datahwsxpzkmotgmd\",\"sourceRetryCount\":\"datawwqevbiuntp\",\"sourceRetryWait\":\"datamwjxlyce\",\"maxConcurrentConnections\":\"dataeqgywrauur\",\"disableMetricsCollection\":\"datad\",\"\":{\"rmiecfmqcxm\":\"datacnk\"}}") - .toObject(CouchbaseSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CouchbaseSource model = new CouchbaseSource().withSourceRetryCount("datawwqevbiuntp") - .withSourceRetryWait("datamwjxlyce") - .withMaxConcurrentConnections("dataeqgywrauur") - .withDisableMetricsCollection("datad") - .withQueryTimeout("dataejssksgxykdepqcy") - .withAdditionalColumns("datahwsxpzkmotgmd") - .withQuery("dataxlkcw"); - model = BinaryData.fromObject(model).toObject(CouchbaseSource.class); - } -} diff 
--git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java deleted file mode 100644 index 5ec6b09b58a2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CouchbaseTableDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CouchbaseTableDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CouchbaseTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CouchbaseTableDataset model = BinaryData.fromString( - 
"{\"type\":\"CouchbaseTable\",\"typeProperties\":{\"tableName\":\"dataeeocnqoubvepvlr\"},\"description\":\"zfhd\",\"structure\":\"datafhxohz\",\"schema\":\"datahhavzfuxnvk\",\"linkedServiceName\":{\"referenceName\":\"slcof\",\"parameters\":{\"dxqfussubzssp\":\"datafuehouisaklhjf\",\"kqdgwbztrth\":\"datajvailfauyvxpqw\",\"ffjdhgslormhbt\":\"datadwvog\"}},\"parameters\":{\"kygbptmsgkwedwlx\":{\"type\":\"Float\",\"defaultValue\":\"datakylhcnsdylmnqu\"},\"hudhrpj\":{\"type\":\"Int\",\"defaultValue\":\"datagbg\"}},\"annotations\":[\"datarr\",\"datafibpkwmamrlfi\",\"datajudd\",\"datad\"],\"folder\":{\"name\":\"pngyhylqyafe\"},\"\":{\"u\":\"dataodx\",\"tzeargvfvkhbj\":\"dataytxnxrqx\",\"gnybffqcwwyne\":\"datamvpjxsdhwe\",\"jijfhpxni\":\"dataktvmwgvconyse\"}}") - .toObject(CouchbaseTableDataset.class); - Assertions.assertEquals("zfhd", model.description()); - Assertions.assertEquals("slcof", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kygbptmsgkwedwlx").type()); - Assertions.assertEquals("pngyhylqyafe", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CouchbaseTableDataset model = new CouchbaseTableDataset().withDescription("zfhd") - .withStructure("datafhxohz") - .withSchema("datahhavzfuxnvk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("slcof") - .withParameters(mapOf("dxqfussubzssp", "datafuehouisaklhjf", "kqdgwbztrth", "datajvailfauyvxpqw", - "ffjdhgslormhbt", "datadwvog"))) - .withParameters(mapOf("kygbptmsgkwedwlx", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakylhcnsdylmnqu"), - "hudhrpj", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datagbg"))) - .withAnnotations(Arrays.asList("datarr", "datafibpkwmamrlfi", "datajudd", "datad")) - .withFolder(new DatasetFolder().withName("pngyhylqyafe")) - .withTableName("dataeeocnqoubvepvlr"); - model = 
BinaryData.fromObject(model).toObject(CouchbaseTableDataset.class); - Assertions.assertEquals("zfhd", model.description()); - Assertions.assertEquals("slcof", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("kygbptmsgkwedwlx").type()); - Assertions.assertEquals("pngyhylqyafe", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java deleted file mode 100644 index 186b3f18f385..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionRequestTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CreateDataFlowDebugSessionRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CreateDataFlowDebugSessionRequest model = BinaryData.fromString( - "{\"computeType\":\"foudor\",\"coreCount\":199814192,\"timeToLive\":1885072328,\"integrationRuntime\":{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"otwypundmb\",\"\":{\"or\":\"datagcmjkavl\",\"jltfvnzcyjtotpv\":\"datamftpmdtz\",\"qwthmky\":\"datapvpbdbzqgqqiheds\",\"gqcwdhohsdtmc\":\"databcysih\"}},\"name\":\"sufco\"}}") - .toObject(CreateDataFlowDebugSessionRequest.class); - Assertions.assertEquals("foudor", model.computeType()); - Assertions.assertEquals(199814192, model.coreCount()); - Assertions.assertEquals(1885072328, model.timeToLive()); - Assertions.assertEquals("sufco", model.integrationRuntime().name()); - Assertions.assertEquals("otwypundmb", model.integrationRuntime().properties().description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CreateDataFlowDebugSessionRequest model = new CreateDataFlowDebugSessionRequest().withComputeType("foudor") - .withCoreCount(199814192) - .withTimeToLive(1885072328) - .withIntegrationRuntime(new IntegrationRuntimeDebugResource().withName("sufco") - .withProperties(new IntegrationRuntime().withDescription("otwypundmb") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime")))); - model = BinaryData.fromObject(model).toObject(CreateDataFlowDebugSessionRequest.class); - Assertions.assertEquals("foudor", 
model.computeType()); - Assertions.assertEquals(199814192, model.coreCount()); - Assertions.assertEquals(1885072328, model.timeToLive()); - Assertions.assertEquals("sufco", model.integrationRuntime().name()); - Assertions.assertEquals("otwypundmb", model.integrationRuntime().properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java deleted file mode 100644 index 40d08323ef0e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateDataFlowDebugSessionResponseInnerTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CreateDataFlowDebugSessionResponseInner; -import org.junit.jupiter.api.Assertions; - -public final class CreateDataFlowDebugSessionResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CreateDataFlowDebugSessionResponseInner model - = BinaryData.fromString("{\"status\":\"zvd\",\"sessionId\":\"zdix\"}") - .toObject(CreateDataFlowDebugSessionResponseInner.class); - Assertions.assertEquals("zvd", model.status()); - Assertions.assertEquals("zdix", model.sessionId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CreateDataFlowDebugSessionResponseInner model - = new CreateDataFlowDebugSessionResponseInner().withStatus("zvd").withSessionId("zdix"); - model = BinaryData.fromObject(model).toObject(CreateDataFlowDebugSessionResponseInner.class); - Assertions.assertEquals("zvd", model.status()); - Assertions.assertEquals("zdix", model.sessionId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java deleted file mode 100644 index b3b56bd0fd34..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateLinkedIntegrationRuntimeRequestTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest; -import org.junit.jupiter.api.Assertions; - -public final class CreateLinkedIntegrationRuntimeRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CreateLinkedIntegrationRuntimeRequest model = BinaryData.fromString( - "{\"name\":\"ni\",\"subscriptionId\":\"x\",\"dataFactoryName\":\"kpycgklwndnhjd\",\"dataFactoryLocation\":\"whvylw\"}") - .toObject(CreateLinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("ni", model.name()); - Assertions.assertEquals("x", model.subscriptionId()); - Assertions.assertEquals("kpycgklwndnhjd", model.dataFactoryName()); - Assertions.assertEquals("whvylw", model.dataFactoryLocation()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CreateLinkedIntegrationRuntimeRequest model = new CreateLinkedIntegrationRuntimeRequest().withName("ni") - .withSubscriptionId("x") - .withDataFactoryName("kpycgklwndnhjd") - .withDataFactoryLocation("whvylw"); - model = BinaryData.fromObject(model).toObject(CreateLinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("ni", model.name()); - Assertions.assertEquals("x", model.subscriptionId()); - Assertions.assertEquals("kpycgklwndnhjd", model.dataFactoryName()); - Assertions.assertEquals("whvylw", model.dataFactoryLocation()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java deleted file mode 100644 index fe9d54777e03..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CreateRunResponseInnerTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CreateRunResponseInner; -import org.junit.jupiter.api.Assertions; - -public final class CreateRunResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CreateRunResponseInner model - = BinaryData.fromString("{\"runId\":\"vvhmxtdrj\"}").toObject(CreateRunResponseInner.class); - Assertions.assertEquals("vvhmxtdrj", model.runId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CreateRunResponseInner model = new CreateRunResponseInner().withRunId("vvhmxtdrj"); - model = BinaryData.fromObject(model).toObject(CreateRunResponseInner.class); - Assertions.assertEquals("vvhmxtdrj", model.runId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java deleted file mode 100644 index 4ac1909944e5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialListResponseTests.java +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; -import com.azure.resourcemanager.datafactory.models.Credential; -import com.azure.resourcemanager.datafactory.models.CredentialListResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CredentialListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CredentialListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"Credential\",\"description\":\"oqhnlb\",\"annotations\":[\"dataldxea\",\"datalgsc\",\"dataorim\",\"datarsrrmoucsofldp\"],\"\":{\"uqibsxtkcu\":\"datayfcaabeolhbhlvbm\"}},\"name\":\"b\",\"type\":\"arfsi\",\"etag\":\"lkjxnqpvwgf\",\"id\":\"mhqykizmdksa\"},{\"properties\":{\"type\":\"Credential\",\"description\":\"fcluqvo\",\"annotations\":[\"datacjimryvwgcwwpbmz\",\"dataw\",\"datasydsxwefohe\",\"databvopwndyqle\"],\"\":{\"pvbrdfjmzsyz\":\"dataklmtkhlowkx\",\"pjrtws\":\"datahotlhikcyychunsj\",\"uic\":\"datahv\"}},\"name\":\"vtrrmhwrbfdpyflu\",\"type\":\"vjglrocuyzlwhhme\",\"etag\":\"ooclutnp\",\"id\":\"emc\"},{\"properties\":{\"type\":\"Credential\",\"description\":\"kmmykyujxsglh\",\"annotations\":[\"datarye\",\"dataylmbkzudni\",\"datarfih\",\"datatjewlpxuzzj\"],\"\":{\"iwfbrkwpzdqtvhcs\":\"dataefqyhqotoihiqaky\",\"ietgbebjfu\":\"dataodaqaxsi\"}},\"name\":\"moichdlpnfpubntn\",\"type\":\"tzviqsowsaaelcat\",\"etag\":\"ju\",\"id\":\"lrvkmjc\"},{\"properties\":{\"type\":\"Credential\",\"description\":\"jvlgfggcvkyyliz\",\"annotations\":[\"datajpsfxsfu\",\"datatl\",\"datatmvag\",\"dataw\"],\"\":{\"rvjfnmjmvlw\":\"datalvhukoveofi\"}},\"name\":\"giblkujrllf\",\"type\":\"uidjpuuyjucej\",\"etag\":\"zoeovvtzejet\",\"id\":\"ln\"}],\"nextLink\":\"k\"}") - .toObject(CredentialListResponse.class); - 
Assertions.assertEquals("mhqykizmdksa", model.value().get(0).id()); - Assertions.assertEquals("oqhnlb", model.value().get(0).properties().description()); - Assertions.assertEquals("k", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CredentialListResponse model - = new CredentialListResponse() - .withValue( - Arrays.asList( - new CredentialResourceInner().withId("mhqykizmdksa") - .withProperties(new Credential().withDescription("oqhnlb") - .withAnnotations( - Arrays.asList("dataldxea", "datalgsc", "dataorim", "datarsrrmoucsofldp")) - .withAdditionalProperties(mapOf("type", "Credential"))), - new CredentialResourceInner().withId("emc") - .withProperties(new Credential().withDescription("fcluqvo") - .withAnnotations(Arrays - .asList("datacjimryvwgcwwpbmz", "dataw", "datasydsxwefohe", "databvopwndyqle")) - .withAdditionalProperties(mapOf("type", "Credential"))), - new CredentialResourceInner().withId("lrvkmjc") - .withProperties(new Credential().withDescription("kmmykyujxsglh") - .withAnnotations( - Arrays.asList("datarye", "dataylmbkzudni", "datarfih", "datatjewlpxuzzj")) - .withAdditionalProperties(mapOf("type", "Credential"))), - new CredentialResourceInner().withId("ln") - .withProperties(new Credential().withDescription("jvlgfggcvkyyliz") - .withAnnotations(Arrays.asList("datajpsfxsfu", "datatl", "datatmvag", "dataw")) - .withAdditionalProperties(mapOf("type", "Credential"))))) - .withNextLink("k"); - model = BinaryData.fromObject(model).toObject(CredentialListResponse.class); - Assertions.assertEquals("mhqykizmdksa", model.value().get(0).id()); - Assertions.assertEquals("oqhnlb", model.value().get(0).properties().description()); - Assertions.assertEquals("k", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 0aff90588b2c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.Credential; -import com.azure.resourcemanager.datafactory.models.CredentialResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class CredentialOperationsCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"Credential\",\"description\":\"jrncvjs\",\"annotations\":[\"dataurcxtyfbbomugubc\",\"datavvdafbtozxvkoieb\"],\"\":{\"iybfskxuyo\":\"datayxmljnseaog\",\"uclq\":\"dataddry\",\"edtwtukkhuusrm\":\"datacwpgipttp\",\"upkebwses\":\"datatonpgtaz\"}},\"name\":\"sbrwdfoprdyt\",\"type\":\"ypv\",\"etag\":\"zooqzdoytnpk\",\"id\":\"cgtgnhzufhwftjnv\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - CredentialResource response = manager.credentialOperations() - .define("arsva") - .withExistingFactory("yxgxohiwkkhth", "ukclayqipwkxfnn") - .withProperties(new Credential().withDescription("hme") - .withAnnotations(Arrays.asList("dataakpr", "dataajtqyevqtydxkpy", "dataxcjafhctqn")) - .withAdditionalProperties(mapOf("type", "Credential"))) - .withIfMatch("blnxquge") - .create(); - - Assertions.assertEquals("cgtgnhzufhwftjnv", response.id()); - Assertions.assertEquals("jrncvjs", response.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java deleted file mode 100644 index cf189db3f465..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class CredentialOperationsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new 
AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.credentialOperations() - .deleteWithResponse("hljqhoiqvkzm", "j", "xzttgva", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java deleted file mode 100644 index eace8f525c46..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsGetWithResponseMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.CredentialResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class CredentialOperationsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"Credential\",\"description\":\"yafdlfkyirjbfwr\",\"annotations\":[\"dataibzoqgut\"],\"\":{\"mhaarkh\":\"datawwmkgzsqrirlc\",\"ercshiuwnef\":\"dataa\",\"klqtnngwpgbfrtxb\":\"databheiywmxsxl\"}},\"name\":\"afttvpmdnigajbxj\",\"type\":\"lfdqpa\",\"etag\":\"wmxmdjezhutc\",\"id\":\"qdchmxr\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - CredentialResource response = manager.credentialOperations() - .getWithResponse("ogboaimwxswfytn", "cjhjrwn", "gtgc", "mpjdrhxfg", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("qdchmxr", response.id()); - Assertions.assertEquals("yafdlfkyirjbfwr", response.properties().description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java deleted file mode 100644 index 977d82dd1960..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialOperationsListByFactoryMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.CredentialResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class CredentialOperationsListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"Credential\",\"description\":\"my\",\"annotations\":[\"datanayfkvwlfcfeyh\",\"datayn\",\"datampuqnvn\"],\"\":{\"jccjorovrfdf\":\"datawicouil\",\"djpkojykytp\":\"datadvifoxozqcy\"}},\"name\":\"rctd\",\"type\":\"jhukwykikqagm\",\"etag\":\"szipihenvhlpu\",\"id\":\"haomaowpmwjsvuz\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.credentialOperations() - .listByFactory("fiomfkiopk", "hbfnhspogxuv", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("haomaowpmwjsvuz", response.iterator().next().id()); - Assertions.assertEquals("my", response.iterator().next().properties().description()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java deleted file mode 100644 index 8afd28791548..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialReferenceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CredentialReference; -import com.azure.resourcemanager.datafactory.models.CredentialReferenceType; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CredentialReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CredentialReference model = BinaryData.fromString( - "{\"type\":\"CredentialReference\",\"referenceName\":\"arxqiubx\",\"\":{\"mmkoszudb\":\"dataecpxdazvdhc\",\"f\":\"datansntrpcaqki\",\"ezyquw\":\"databtfmhklbnldpvcbh\"}}") - .toObject(CredentialReference.class); - Assertions.assertEquals(CredentialReferenceType.CREDENTIAL_REFERENCE, model.type()); - Assertions.assertEquals("arxqiubx", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CredentialReference model = new CredentialReference().withType(CredentialReferenceType.CREDENTIAL_REFERENCE) - .withReferenceName("arxqiubx") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(CredentialReference.class); - Assertions.assertEquals(CredentialReferenceType.CREDENTIAL_REFERENCE, 
model.type()); - Assertions.assertEquals("arxqiubx", model.referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java deleted file mode 100644 index f0e94dbbaf82..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialResourceInnerTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner; -import com.azure.resourcemanager.datafactory.models.Credential; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CredentialResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CredentialResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Credential\",\"description\":\"uzk\",\"annotations\":[\"datazolxrzvhqjwtr\",\"datatgvgzp\",\"datarrkolawjmjs\"],\"\":{\"tlhguynuchl\":\"dataokcdxfzzzwyjaf\",\"whmozusgzvlnsnnj\":\"datamltx\",\"rag\":\"datafpafolpymwamxq\",\"vl\":\"datagdphtvdula\"}},\"name\":\"c\",\"type\":\"srlzknmzlan\",\"etag\":\"pdwvnphcn\",\"id\":\"tpjhm\"}") - .toObject(CredentialResourceInner.class); - Assertions.assertEquals("tpjhm", model.id()); - Assertions.assertEquals("uzk", model.properties().description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CredentialResourceInner model = new CredentialResourceInner().withId("tpjhm") - .withProperties(new Credential().withDescription("uzk") - .withAnnotations(Arrays.asList("datazolxrzvhqjwtr", "datatgvgzp", "datarrkolawjmjs")) - .withAdditionalProperties(mapOf("type", "Credential"))); - model = BinaryData.fromObject(model).toObject(CredentialResourceInner.class); - Assertions.assertEquals("tpjhm", model.id()); - Assertions.assertEquals("uzk", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java deleted file mode 100644 index 97949ac973f4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CredentialTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Credential; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CredentialTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Credential model = BinaryData.fromString( - "{\"type\":\"Credential\",\"description\":\"hvthlaiwdcxsm\",\"annotations\":[\"datahzdtxetlgydlh\",\"datavlnnpx\",\"datab\",\"datafiqgeaar\"],\"\":{\"ulidwc\":\"dataekglklb\",\"nfhjirwgdnqzbr\":\"datavmzegj\",\"igsxcdgljplk\":\"datakspzhzmtksjcit\"}}") - .toObject(Credential.class); - Assertions.assertEquals("hvthlaiwdcxsm", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Credential model = new Credential().withDescription("hvthlaiwdcxsm") - .withAnnotations(Arrays.asList("datahzdtxetlgydlh", "datavlnnpx", "datab", "datafiqgeaar")) - .withAdditionalProperties(mapOf("type", "Credential")); - model = 
BinaryData.fromObject(model).toObject(Credential.class); - Assertions.assertEquals("hvthlaiwdcxsm", model.description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java deleted file mode 100644 index 079c86373cc3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityReferenceObjectTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CustomActivityReferenceObject; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomActivityReferenceObjectTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomActivityReferenceObject model = BinaryData.fromString( - "{\"linkedServices\":[{\"referenceName\":\"tl\",\"parameters\":{\"zrzi\":\"dataeyhpbt\",\"y\":\"datahkkdcjymdoldb\"}},{\"referenceName\":\"xgephviuexfbrsd\",\"parameters\":{\"agaxru\":\"datayi\"}}],\"datasets\":[{\"referenceName\":\"mtiuxy\",\"parameters\":{\"qlhzdbbitpgr\":\"datavp\",\"mojxbvgiee\":\"datahpmsdgmxwfodvzp\",\"vjodgplagwvgb\":\"dataevhnqtba\",\"syszl\":\"dataxmqudnqcbbbhin\"}},{\"referenceName\":\"fzkvrmdoshiyy\",\"parameters\":{\"ybp\":\"datanrg\",\"rvqticgsdcpmclku\":\"datawjjbmkhxun\",\"dcqrssqwzndzuxlg\":\"datadabh\"}},{\"referenceName\":\"y\",\"parameters\":{\"m\":\"datayqlzo\",\"jvbzablmpnt\":\"dataa\"}}]}") - .toObject(CustomActivityReferenceObject.class); - Assertions.assertEquals("tl", model.linkedServices().get(0).referenceName()); - Assertions.assertEquals("mtiuxy", model.datasets().get(0).referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomActivityReferenceObject model - = new CustomActivityReferenceObject() - .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("tl") - .withParameters(mapOf("zrzi", "dataeyhpbt", "y", "datahkkdcjymdoldb")), - new LinkedServiceReference().withReferenceName("xgephviuexfbrsd") - .withParameters(mapOf("agaxru", "datayi")))) - .withDatasets(Arrays.asList( - new 
DatasetReference().withReferenceName("mtiuxy") - .withParameters(mapOf("qlhzdbbitpgr", "datavp", "mojxbvgiee", "datahpmsdgmxwfodvzp", - "vjodgplagwvgb", "dataevhnqtba", "syszl", "dataxmqudnqcbbbhin")), - new DatasetReference().withReferenceName("fzkvrmdoshiyy") - .withParameters(mapOf("ybp", "datanrg", "rvqticgsdcpmclku", "datawjjbmkhxun", - "dcqrssqwzndzuxlg", "datadabh")), - new DatasetReference().withReferenceName("y") - .withParameters(mapOf("m", "datayqlzo", "jvbzablmpnt", "dataa")))); - model = BinaryData.fromObject(model).toObject(CustomActivityReferenceObject.class); - Assertions.assertEquals("tl", model.linkedServices().get(0).referenceName()); - Assertions.assertEquals("mtiuxy", model.datasets().get(0).referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java deleted file mode 100644 index e9206bd9e8d4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTests.java +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.CustomActivity; -import com.azure.resourcemanager.datafactory.models.CustomActivityReferenceObject; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomActivity model = BinaryData.fromString( - 
"{\"type\":\"Custom\",\"typeProperties\":{\"command\":\"dataluszi\",\"resourceLinkedService\":{\"referenceName\":\"rcpxlki\",\"parameters\":{\"svcmceh\":\"datatbvh\",\"krp\":\"datarhwriihwxchyyf\"}},\"folderPath\":\"datanminsqjnuii\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"rpeyigfdppgkk\",\"parameters\":{\"jgdybg\":\"datagjld\",\"tfwculsbnapzfdzm\":\"datatseznowzfxkof\",\"rbclj\":\"datal\"}},{\"referenceName\":\"rj\",\"parameters\":{\"dpbmoq\":\"datanzzlfvefskjbasm\",\"apmeomcpvmakdtg\":\"datasvukgfzby\",\"myewbfo\":\"datanyubnw\",\"etj\":\"datawv\"}},{\"referenceName\":\"njbahxyfdd\",\"parameters\":{\"jh\":\"dataksr\",\"acyxnzadflv\":\"datacrmptjsixawipj\",\"vapo\":\"datakhgdzrcq\",\"hlovmx\":\"dataeminerejrd\"}},{\"referenceName\":\"ztdcadbm\",\"parameters\":{\"mh\":\"dataqmitrpa\",\"sziuhm\":\"datawcy\"}}],\"datasets\":[{\"referenceName\":\"hupoeljf\",\"parameters\":{\"awbsdeqqbdcbnrg\":\"datayoxajit\",\"mtgtnb\":\"datapnor\"}},{\"referenceName\":\"sopuwesmxodyto\",\"parameters\":{\"einhnsdg\":\"datal\"}},{\"referenceName\":\"uaqtqnqm\",\"parameters\":{\"ukenkuyombkgkyo\":\"datatzgomu\",\"yxkzxrm\":\"datauihprvokodr\",\"kafdxgtgcfkeaeup\":\"dataoycufkxygxoub\"}},{\"referenceName\":\"htlk\",\"parameters\":{\"ysrswzhciazweb\":\"dataqebbqetxsuxvjjwl\",\"yvzodnx\":\"dataskmqkanuxju\",\"ufancjlkrskz\":\"datacdgk\"}}]},\"extendedProperties\":{\"qjwvqia\":\"databafqzihmvw\"},\"retentionTimeInDays\":\"dataqjzvivwd\",\"autoUserSpecification\":\"datatcfulmzxhgwzbyst\"},\"linkedServiceName\":{\"referenceName\":\"uwehntjss\",\"parameters\":{\"ca\":\"datanatpymvqich\"}},\"policy\":{\"timeout\":\"datayjzn\",\"retry\":\"datayou\",\"retryIntervalInSeconds\":64897243,\"secureInput\":true,\"secureOutput\":true,\"\":{\"uasutdhmilhzy\":\"datavincnihmwvhcgc\",\"un\":\"datadjc\"}},\"name\":\"dadyrhmpokfxcbb\",\"description\":\"xgajyrac\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"eouhjetxupxe\",\"dependencyConditions\":[\
"Succeeded\"],\"\":{\"geaenkfsxtsmzva\":\"dataru\"}}],\"userProperties\":[{\"name\":\"ajalbsw\",\"value\":\"databrllvvazu\"},{\"name\":\"cqzznwlxzmszx\",\"value\":\"datafai\"}],\"\":{\"lxjbrqbut\":\"datadqvdivzjyxsjbl\",\"vawe\":\"dataacnqudmyd\"}}") - .toObject(CustomActivity.class); - Assertions.assertEquals("dadyrhmpokfxcbb", model.name()); - Assertions.assertEquals("xgajyrac", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("eouhjetxupxe", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ajalbsw", model.userProperties().get(0).name()); - Assertions.assertEquals("uwehntjss", model.linkedServiceName().referenceName()); - Assertions.assertEquals(64897243, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("rcpxlki", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("rpeyigfdppgkk", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("hupoeljf", model.referenceObjects().datasets().get(0).referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomActivity model - = new CustomActivity().withName("dadyrhmpokfxcbb") - .withDescription("xgajyrac") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("eouhjetxupxe") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ajalbsw").withValue("databrllvvazu"), - new 
UserProperty().withName("cqzznwlxzmszx").withValue("datafai"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uwehntjss") - .withParameters(mapOf("ca", "datanatpymvqich"))) - .withPolicy(new ActivityPolicy().withTimeout("datayjzn") - .withRetry("datayou") - .withRetryIntervalInSeconds(64897243) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withCommand("dataluszi") - .withResourceLinkedService(new LinkedServiceReference().withReferenceName("rcpxlki") - .withParameters(mapOf("svcmceh", "datatbvh", "krp", "datarhwriihwxchyyf"))) - .withFolderPath("datanminsqjnuii") - .withReferenceObjects( - new CustomActivityReferenceObject() - .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("rpeyigfdppgkk") - .withParameters(mapOf("jgdybg", "datagjld", "tfwculsbnapzfdzm", "datatseznowzfxkof", - "rbclj", "datal")), - new LinkedServiceReference().withReferenceName("rj") - .withParameters(mapOf("dpbmoq", "datanzzlfvefskjbasm", "apmeomcpvmakdtg", - "datasvukgfzby", "myewbfo", "datanyubnw", "etj", "datawv")), - new LinkedServiceReference().withReferenceName("njbahxyfdd") - .withParameters(mapOf("jh", "dataksr", "acyxnzadflv", "datacrmptjsixawipj", "vapo", - "datakhgdzrcq", "hlovmx", "dataeminerejrd")), - new LinkedServiceReference().withReferenceName("ztdcadbm") - .withParameters(mapOf("mh", "dataqmitrpa", "sziuhm", "datawcy")))) - .withDatasets(Arrays.asList( - new DatasetReference().withReferenceName("hupoeljf") - .withParameters(mapOf("awbsdeqqbdcbnrg", "datayoxajit", "mtgtnb", "datapnor")), - new DatasetReference().withReferenceName("sopuwesmxodyto") - .withParameters(mapOf("einhnsdg", "datal")), - new DatasetReference().withReferenceName("uaqtqnqm") - .withParameters(mapOf("ukenkuyombkgkyo", "datatzgomu", "yxkzxrm", "datauihprvokodr", - "kafdxgtgcfkeaeup", "dataoycufkxygxoub")), - new DatasetReference().withReferenceName("htlk") - .withParameters(mapOf("ysrswzhciazweb", 
"dataqebbqetxsuxvjjwl", "yvzodnx", - "dataskmqkanuxju", "ufancjlkrskz", "datacdgk"))))) - .withExtendedProperties(mapOf("qjwvqia", "databafqzihmvw")) - .withRetentionTimeInDays("dataqjzvivwd") - .withAutoUserSpecification("datatcfulmzxhgwzbyst"); - model = BinaryData.fromObject(model).toObject(CustomActivity.class); - Assertions.assertEquals("dadyrhmpokfxcbb", model.name()); - Assertions.assertEquals("xgajyrac", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("eouhjetxupxe", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ajalbsw", model.userProperties().get(0).name()); - Assertions.assertEquals("uwehntjss", model.linkedServiceName().referenceName()); - Assertions.assertEquals(64897243, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("rcpxlki", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("rpeyigfdppgkk", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("hupoeljf", model.referenceObjects().datasets().get(0).referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java deleted file mode 100644 index ccc911d2bebe..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomActivityTypePropertiesTests.java +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CustomActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.CustomActivityReferenceObject; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomActivityTypeProperties model = BinaryData.fromString( - 
"{\"command\":\"datajqfbvbv\",\"resourceLinkedService\":{\"referenceName\":\"ryrzoqyy\",\"parameters\":{\"obhltmpay\":\"datab\",\"yttosnzbbxifa\":\"dataqqgr\",\"zpvp\":\"datarhpuzca\"}},\"folderPath\":\"datatosui\",\"referenceObjects\":{\"linkedServices\":[{\"referenceName\":\"mocndbuexr\",\"parameters\":{\"dafilaizcdugn\":\"datawyxodpcgdvytnbkv\",\"nkxoqecjznuqg\":\"dataymljgaykaq\",\"ewshhqgjvchl\":\"datagvm\"}},{\"referenceName\":\"e\",\"parameters\":{\"raksahw\":\"datatczzjfzjovwiz\",\"snb\":\"datapukltfknroxm\",\"fvqtvukcfesizkn\":\"datacz\"}}],\"datasets\":[{\"referenceName\":\"xflzhgr\",\"parameters\":{\"ofdgzl\":\"dataysdmovbvnjyq\",\"voxnjbyjgobzj\":\"datakczolndwrggyt\",\"ifrjgvhone\":\"dataukfwmhzarrfttx\",\"lmkfvsol\":\"datavvab\"}},{\"referenceName\":\"jowvzyoehlj\",\"parameters\":{\"othnucqktuaerg\":\"datag\",\"dlbahmivtuphwwy\":\"datatpriicte\",\"fxfteo\":\"dataxo\",\"qap\":\"datanrziwkcpxgjmyou\"}},{\"referenceName\":\"aypcdikkmyrs\",\"parameters\":{\"gg\":\"datat\",\"sxjzklqkgjukntkn\":\"datapohuv\"}},{\"referenceName\":\"h\",\"parameters\":{\"e\":\"dataziqcw\",\"hptlnwnlauwb\":\"datad\",\"mgeuoihtik\":\"dataeszx\"}}]},\"extendedProperties\":{\"av\":\"datapgx\"},\"retentionTimeInDays\":\"datadsuwct\",\"autoUserSpecification\":\"datahcj\"}") - .toObject(CustomActivityTypeProperties.class); - Assertions.assertEquals("ryrzoqyy", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("mocndbuexr", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("xflzhgr", model.referenceObjects().datasets().get(0).referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomActivityTypeProperties model = new CustomActivityTypeProperties().withCommand("datajqfbvbv") - .withResourceLinkedService(new LinkedServiceReference().withReferenceName("ryrzoqyy") - .withParameters(mapOf("obhltmpay", "datab", "yttosnzbbxifa", "dataqqgr", "zpvp", "datarhpuzca"))) - 
.withFolderPath("datatosui") - .withReferenceObjects(new CustomActivityReferenceObject() - .withLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("mocndbuexr") - .withParameters(mapOf("dafilaizcdugn", "datawyxodpcgdvytnbkv", "nkxoqecjznuqg", - "dataymljgaykaq", "ewshhqgjvchl", "datagvm")), - new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("raksahw", "datatczzjfzjovwiz", "snb", "datapukltfknroxm", - "fvqtvukcfesizkn", "datacz")))) - .withDatasets(Arrays.asList( - new DatasetReference().withReferenceName("xflzhgr") - .withParameters(mapOf("ofdgzl", "dataysdmovbvnjyq", "voxnjbyjgobzj", "datakczolndwrggyt", - "ifrjgvhone", "dataukfwmhzarrfttx", "lmkfvsol", "datavvab")), - new DatasetReference().withReferenceName("jowvzyoehlj") - .withParameters(mapOf("othnucqktuaerg", "datag", "dlbahmivtuphwwy", "datatpriicte", "fxfteo", - "dataxo", "qap", "datanrziwkcpxgjmyou")), - new DatasetReference().withReferenceName("aypcdikkmyrs") - .withParameters(mapOf("gg", "datat", "sxjzklqkgjukntkn", "datapohuv")), - new DatasetReference().withReferenceName("h") - .withParameters(mapOf("e", "dataziqcw", "hptlnwnlauwb", "datad", "mgeuoihtik", "dataeszx"))))) - .withExtendedProperties(mapOf("av", "datapgx")) - .withRetentionTimeInDays("datadsuwct") - .withAutoUserSpecification("datahcj"); - model = BinaryData.fromObject(model).toObject(CustomActivityTypeProperties.class); - Assertions.assertEquals("ryrzoqyy", model.resourceLinkedService().referenceName()); - Assertions.assertEquals("mocndbuexr", model.referenceObjects().linkedServices().get(0).referenceName()); - Assertions.assertEquals("xflzhgr", model.referenceObjects().datasets().get(0).referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java deleted file mode 100644 index dbe96c96984c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDataSourceLinkedServiceTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CustomDataSourceLinkedService; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomDataSourceLinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomDataSourceLinkedService model = BinaryData.fromString( - 
"{\"type\":\"CustomDataSource\",\"typeProperties\":\"datadulymk\",\"connectVia\":{\"referenceName\":\"s\",\"parameters\":{\"npftkgm\":\"dataxfgha\"}},\"description\":\"vxbiu\",\"parameters\":{\"tbame\":{\"type\":\"Float\",\"defaultValue\":\"datas\"}},\"annotations\":[\"datausnaqsvruuh\",\"datancppmmwhjerlurg\"],\"\":{\"mckgpxdxgcq\":\"datanbxlep\"}}") - .toObject(CustomDataSourceLinkedService.class); - Assertions.assertEquals("s", model.connectVia().referenceName()); - Assertions.assertEquals("vxbiu", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tbame").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomDataSourceLinkedService model = new CustomDataSourceLinkedService() - .withConnectVia( - new IntegrationRuntimeReference().withReferenceName("s").withParameters(mapOf("npftkgm", "dataxfgha"))) - .withDescription("vxbiu") - .withParameters( - mapOf("tbame", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datas"))) - .withAnnotations(Arrays.asList("datausnaqsvruuh", "datancppmmwhjerlurg")) - .withTypeProperties("datadulymk"); - model = BinaryData.fromObject(model).toObject(CustomDataSourceLinkedService.class); - Assertions.assertEquals("s", model.connectVia().referenceName()); - Assertions.assertEquals("vxbiu", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("tbame").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java deleted file mode 100644 index 496e05c03212..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CustomDataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomDataset model = BinaryData.fromString( - 
"{\"type\":\"CustomDataset\",\"typeProperties\":\"dataekfsrm\",\"description\":\"klajvcfocz\",\"structure\":\"datapejovt\",\"schema\":\"dataxnhwh\",\"linkedServiceName\":{\"referenceName\":\"noyrzaaoeehpmj\",\"parameters\":{\"xc\":\"datajeatea\",\"zkdolrndwdbvxvza\":\"dataxoxdjxldnaryyi\"}},\"parameters\":{\"fthmcxqqxmy\":{\"type\":\"Object\",\"defaultValue\":\"dataqxlun\"},\"a\":{\"type\":\"Object\",\"defaultValue\":\"dataa\"}},\"annotations\":[\"datahrvmzrqrazadi\",\"dataznllaslk\"],\"folder\":{\"name\":\"jqjpv\"},\"\":{\"wbqgroigbsfsgsa\":\"datahxjtgzgtai\",\"xspmrjc\":\"datanwldfmhljq\",\"hsxrznmgsdaluyc\":\"dataryldsxeb\"}}") - .toObject(CustomDataset.class); - Assertions.assertEquals("klajvcfocz", model.description()); - Assertions.assertEquals("noyrzaaoeehpmj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fthmcxqqxmy").type()); - Assertions.assertEquals("jqjpv", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomDataset model = new CustomDataset().withDescription("klajvcfocz") - .withStructure("datapejovt") - .withSchema("dataxnhwh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("noyrzaaoeehpmj") - .withParameters(mapOf("xc", "datajeatea", "zkdolrndwdbvxvza", "dataxoxdjxldnaryyi"))) - .withParameters(mapOf("fthmcxqqxmy", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataqxlun"), "a", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataa"))) - .withAnnotations(Arrays.asList("datahrvmzrqrazadi", "dataznllaslk")) - .withFolder(new DatasetFolder().withName("jqjpv")) - .withTypeProperties("dataekfsrm"); - model = BinaryData.fromObject(model).toObject(CustomDataset.class); - Assertions.assertEquals("klajvcfocz", model.description()); - Assertions.assertEquals("noyrzaaoeehpmj", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fthmcxqqxmy").type()); - Assertions.assertEquals("jqjpv", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java deleted file mode 100644 index 25a51e510b5a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CustomEventsTrigger; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class CustomEventsTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomEventsTrigger model = BinaryData.fromString( - "{\"type\":\"CustomEventsTrigger\",\"typeProperties\":{\"subjectBeginsWith\":\"vdzzukhlwvvh\",\"subjectEndsWith\":\"kadmihgbtnlejzda\",\"events\":[\"datak\",\"datajvikpgzkfjqo\"],\"scope\":\"bpjlr\"},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"jxjdlgign\",\"name\":\"lccix\"},\"parameters\":{\"jznzgmfufszvsji\":\"datazgbuhcrwqrf\",\"lcqaafuwxeho\":\"datajve\",\"q\":\"dataazbgcbd\"}},{\"pipelineReference\":{\"referenceName\":\"wvtimy\",\"name\":\"dogn\"},\"parameters\":{\"jns\":\"datavgowkak\",\"dv\":\"datajiykwbytuzhcpx\"}}],\"description\":\"xv\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datay\",\"dataiyovcrmoalvea\",\"datauz\"],\"\":{\"rrikv\":\"datarajpedowmhg\"}}") - .toObject(CustomEventsTrigger.class); - Assertions.assertEquals("xv", model.description()); - Assertions.assertEquals("jxjdlgign", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("lccix", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vdzzukhlwvvh", model.subjectBeginsWith()); - Assertions.assertEquals("kadmihgbtnlejzda", model.subjectEndsWith()); - Assertions.assertEquals("bpjlr", model.scope()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomEventsTrigger model = new CustomEventsTrigger().withDescription("xv") - 
.withAnnotations(Arrays.asList("datay", "dataiyovcrmoalvea", "datauz")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("jxjdlgign").withName("lccix")) - .withParameters( - mapOf("jznzgmfufszvsji", "datazgbuhcrwqrf", "lcqaafuwxeho", "datajve", "q", "dataazbgcbd")), - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("wvtimy").withName("dogn")) - .withParameters(mapOf("jns", "datavgowkak", "dv", "datajiykwbytuzhcpx")))) - .withSubjectBeginsWith("vdzzukhlwvvh") - .withSubjectEndsWith("kadmihgbtnlejzda") - .withEvents(Arrays.asList("datak", "datajvikpgzkfjqo")) - .withScope("bpjlr"); - model = BinaryData.fromObject(model).toObject(CustomEventsTrigger.class); - Assertions.assertEquals("xv", model.description()); - Assertions.assertEquals("jxjdlgign", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("lccix", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals("vdzzukhlwvvh", model.subjectBeginsWith()); - Assertions.assertEquals("kadmihgbtnlejzda", model.subjectEndsWith()); - Assertions.assertEquals("bpjlr", model.scope()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java deleted file mode 100644 index e2adcf205514..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomEventsTriggerTypePropertiesTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.CustomEventsTriggerTypeProperties; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class CustomEventsTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomEventsTriggerTypeProperties model = BinaryData.fromString( - "{\"subjectBeginsWith\":\"gxnopdeqqfycwbu\",\"subjectEndsWith\":\"fikiu\",\"events\":[\"datavpxptqbwn\"],\"scope\":\"ilgamxnj\"}") - .toObject(CustomEventsTriggerTypeProperties.class); - Assertions.assertEquals("gxnopdeqqfycwbu", model.subjectBeginsWith()); - Assertions.assertEquals("fikiu", model.subjectEndsWith()); - Assertions.assertEquals("ilgamxnj", model.scope()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomEventsTriggerTypeProperties model - = new CustomEventsTriggerTypeProperties().withSubjectBeginsWith("gxnopdeqqfycwbu") 
- .withSubjectEndsWith("fikiu") - .withEvents(Arrays.asList("datavpxptqbwn")) - .withScope("ilgamxnj"); - model = BinaryData.fromObject(model).toObject(CustomEventsTriggerTypeProperties.class); - Assertions.assertEquals("gxnopdeqqfycwbu", model.subjectBeginsWith()); - Assertions.assertEquals("fikiu", model.subjectEndsWith()); - Assertions.assertEquals("ilgamxnj", model.scope()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java deleted file mode 100644 index a748844197db..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/CustomSetupBaseTests.java +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CustomSetupBase; - -public final class CustomSetupBaseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CustomSetupBase model = BinaryData.fromString("{\"type\":\"CustomSetupBase\"}").toObject(CustomSetupBase.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CustomSetupBase model = new CustomSetupBase(); - model = BinaryData.fromObject(model).toObject(CustomSetupBase.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java deleted file mode 100644 index 5cc03d183dc8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandDefaultValueTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DWCopyCommandDefaultValue; - -public final class DWCopyCommandDefaultValueTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DWCopyCommandDefaultValue model - = BinaryData.fromString("{\"columnName\":\"dataxllfwxdou\",\"defaultValue\":\"datazpaqjahjxgedtmzh\"}") - .toObject(DWCopyCommandDefaultValue.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DWCopyCommandDefaultValue model - = new DWCopyCommandDefaultValue().withColumnName("dataxllfwxdou").withDefaultValue("datazpaqjahjxgedtmzh"); - model = BinaryData.fromObject(model).toObject(DWCopyCommandDefaultValue.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java deleted file mode 100644 index fa313cf86a1b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DWCopyCommandSettingsTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DWCopyCommandDefaultValue; -import com.azure.resourcemanager.datafactory.models.DWCopyCommandSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DWCopyCommandSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DWCopyCommandSettings model = BinaryData.fromString( - "{\"defaultValues\":[{\"columnName\":\"datapdcbhe\",\"defaultValue\":\"datautuaysxhfupvq\"},{\"columnName\":\"dataqlafi\",\"defaultValue\":\"dataw\"},{\"columnName\":\"dataipuyefhhdrmg\",\"defaultValue\":\"datafgyxkgqwmpghxpcx\"}],\"additionalOptions\":{\"dhx\":\"nkxhc\"}}") - .toObject(DWCopyCommandSettings.class); - Assertions.assertEquals("nkxhc", model.additionalOptions().get("dhx")); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DWCopyCommandSettings model - = new DWCopyCommandSettings() - .withDefaultValues( - Arrays - .asList( - new DWCopyCommandDefaultValue().withColumnName("datapdcbhe") - .withDefaultValue("datautuaysxhfupvq"), - new DWCopyCommandDefaultValue().withColumnName("dataqlafi").withDefaultValue("dataw"), - new DWCopyCommandDefaultValue().withColumnName("dataipuyefhhdrmg") - .withDefaultValue("datafgyxkgqwmpghxpcx"))) - .withAdditionalOptions(mapOf("dhx", "nkxhc")); - model = BinaryData.fromObject(model).toObject(DWCopyCommandSettings.class); - Assertions.assertEquals("nkxhc", model.additionalOptions().get("dhx")); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java deleted file mode 100644 index d64f173e5a63..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandPayloadTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugCommandPayloadTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugCommandPayload model = BinaryData.fromString( - "{\"streamName\":\"uartvti\",\"rowLimits\":1848545940,\"columns\":[\"chnmna\",\"mnxhkxjqirwrweo\"],\"expression\":\"ffifhx\"}") - .toObject(DataFlowDebugCommandPayload.class); - Assertions.assertEquals("uartvti", model.streamName()); - Assertions.assertEquals(1848545940, model.rowLimits()); - Assertions.assertEquals("chnmna", model.columns().get(0)); - Assertions.assertEquals("ffifhx", model.expression()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugCommandPayload model = new DataFlowDebugCommandPayload().withStreamName("uartvti") - .withRowLimits(1848545940) - 
.withColumns(Arrays.asList("chnmna", "mnxhkxjqirwrweo")) - .withExpression("ffifhx"); - model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandPayload.class); - Assertions.assertEquals("uartvti", model.streamName()); - Assertions.assertEquals(1848545940, model.rowLimits()); - Assertions.assertEquals("chnmna", model.columns().get(0)); - Assertions.assertEquals("ffifhx", model.expression()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java deleted file mode 100644 index 1199c7088469..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandRequestTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandRequest; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandType; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugCommandRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugCommandRequest model = BinaryData.fromString( - "{\"sessionId\":\"fcbahhp\",\"command\":\"executeExpressionQuery\",\"commandPayload\":{\"streamName\":\"o\",\"rowLimits\":2134608057,\"columns\":[\"filkmkkholv\"],\"expression\":\"dviauogp\"}}") - .toObject(DataFlowDebugCommandRequest.class); - Assertions.assertEquals("fcbahhp", model.sessionId()); - Assertions.assertEquals(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY, model.command()); - Assertions.assertEquals("o", model.commandPayload().streamName()); - Assertions.assertEquals(2134608057, model.commandPayload().rowLimits()); - Assertions.assertEquals("filkmkkholv", model.commandPayload().columns().get(0)); - Assertions.assertEquals("dviauogp", model.commandPayload().expression()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugCommandRequest model = new DataFlowDebugCommandRequest().withSessionId("fcbahhp") - .withCommand(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY) - .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("o") - .withRowLimits(2134608057) - .withColumns(Arrays.asList("filkmkkholv")) - .withExpression("dviauogp")); - model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandRequest.class); - Assertions.assertEquals("fcbahhp", model.sessionId()); - Assertions.assertEquals(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY, model.command()); - 
Assertions.assertEquals("o", model.commandPayload().streamName()); - Assertions.assertEquals(2134608057, model.commandPayload().rowLimits()); - Assertions.assertEquals("filkmkkholv", model.commandPayload().columns().get(0)); - Assertions.assertEquals("dviauogp", model.commandPayload().expression()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java deleted file mode 100644 index 47870222c8a4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugCommandResponseInnerTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugCommandResponseInner; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugCommandResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugCommandResponseInner model - = BinaryData.fromString("{\"status\":\"snewmozqvbub\",\"data\":\"amhsycxhxzgazt\"}") - .toObject(DataFlowDebugCommandResponseInner.class); - Assertions.assertEquals("snewmozqvbub", model.status()); - Assertions.assertEquals("amhsycxhxzgazt", model.data()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugCommandResponseInner model - = new DataFlowDebugCommandResponseInner().withStatus("snewmozqvbub").withData("amhsycxhxzgazt"); - model = BinaryData.fromObject(model).toObject(DataFlowDebugCommandResponseInner.class); - Assertions.assertEquals("snewmozqvbub", model.status()); - Assertions.assertEquals("amhsycxhxzgazt", model.data()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java deleted file mode 100644 index 5549a1bcae51..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageDebugSettingsTests.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings; -import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugPackageDebugSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugPackageDebugSettings model = BinaryData.fromString( - "{\"sourceSettings\":[{\"sourceName\":\"cnqmxqpsw\",\"rowLimit\":2027239261,\"\":{\"gdhbe\":\"datahl\"}},{\"sourceName\":\"qkzszuwiwtglxxh\",\"rowLimit\":1438070602,\"\":{\"pqcbfrmbodthsq\":\"datapicrmnzhrgmqgjsx\",\"fr\":\"datagvriibakclac\"}},{\"sourceName\":\"ousxauzlwvsgmw\",\"rowLimit\":414245170,\"\":{\"mmkjsvthnwpztek\":\"dataizvu\",\"gplucfotangcfhny\":\"datavmribiat\",\"vtxnjmxmcuqud\":\"datazcugswvxwlmzqw\"}}],\"parameters\":{\"dkvgfabuiyjibuzp\":\"dataclxyn\"},\"datasetParameters\":\"dataugneikn\"}") - .toObject(DataFlowDebugPackageDebugSettings.class); - Assertions.assertEquals("cnqmxqpsw", model.sourceSettings().get(0).sourceName()); - Assertions.assertEquals(2027239261, model.sourceSettings().get(0).rowLimit()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugPackageDebugSettings model = new DataFlowDebugPackageDebugSettings() - .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("cnqmxqpsw") - .withRowLimit(2027239261) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("qkzszuwiwtglxxh") - .withRowLimit(1438070602) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("ousxauzlwvsgmw") - .withRowLimit(414245170) - .withAdditionalProperties(mapOf()))) - .withParameters(mapOf("dkvgfabuiyjibuzp", "dataclxyn")) - 
.withDatasetParameters("dataugneikn"); - model = BinaryData.fromObject(model).toObject(DataFlowDebugPackageDebugSettings.class); - Assertions.assertEquals("cnqmxqpsw", model.sourceSettings().get(0).sourceName()); - Assertions.assertEquals(2027239261, model.sourceSettings().get(0).rowLimit()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java deleted file mode 100644 index 3fb39c218ed2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugPackageTests.java +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackage; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugResource; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting; -import com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetDebugResource; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugPackageTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugPackage model = BinaryData.fromString( - 
"{\"sessionId\":\"ryxynqnzrd\",\"dataFlow\":{\"properties\":{\"type\":\"DataFlow\",\"description\":\"vwxzn\",\"annotations\":[\"dataoeiy\",\"datab\",\"databp\",\"datahv\"],\"folder\":{\"name\":\"kvntjlrigjkskyri\"}},\"name\":\"vzidsxwaab\"},\"dataFlows\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"rygznmmaxriz\",\"annotations\":[\"databgopxlhslnel\",\"dataieixynllxe\"],\"folder\":{\"name\":\"rojphslhcawjutif\"}},\"name\":\"fmvigorqjbttzh\"},{\"properties\":{\"type\":\"DataFlow\",\"description\":\"glka\",\"annotations\":[\"datan\",\"datajuj\",\"dataickpz\",\"datacpopmxel\"],\"folder\":{\"name\":\"ltyjedexxmlfmk\"}},\"name\":\"cazuaw\"}],\"datasets\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"puamwabzxr\",\"structure\":\"datacush\",\"schema\":\"datahaivm\",\"linkedServiceName\":{\"referenceName\":\"yasflvgsgzwy\",\"parameters\":{\"knsmjblmljhlnymz\":\"dataoi\"}},\"parameters\":{\"gtayxonsupeujlz\":{\"type\":\"Bool\",\"defaultValue\":\"datayuzcbmqqvxmvw\"},\"nzoibgsxgnx\":{\"type\":\"SecureString\",\"defaultValue\":\"datacvsql\"},\"bxiqxeiiqbimht\":{\"type\":\"Int\",\"defaultValue\":\"dataonmpqoxwdof\"},\"qpofvwbc\":{\"type\":\"Float\",\"defaultValue\":\"datainheh\"}},\"annotations\":[\"datambnkb\",\"datavqvxk\"],\"folder\":{\"name\":\"qihebw\"},\"\":{\"gi\":\"databzuwfmdurag\",\"igkxkbsazga\":\"datavcjfelisdjubggb\",\"apvu\":\"datagacyrcmjdmspo\",\"zjedmstkvnlv\":\"datarylniofrzg\"}},\"name\":\"c\"}],\"linkedServices\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"nktwfansnvpdibmi\",\"parameters\":{\"yls\":\"datatbzbkiwbuqnyophz\"}},\"description\":\"rpfbcunezz\",\"parameters\":{\"psihcla\":{\"type\":\"Bool\",\"defaultValue\":\"datafwyfwlwxjwet\"},\"rsqqwztcm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataaylp\"},\"jexfdeqvhp\":{\"type\":\"Array\",\"defaultValue\":\"datachcxwaxfewzj\"}},\"annotations\":[\"datakkshkbffm\",\"datamxzjrgywwpgjx\",\"datanptfujgi\",\"datagaao\"],\"\":{\"swvr\":\"datataqutde
wem\",\"kimrt\":\"dataunzzjgehk\",\"jqepqwhi\":\"dataxokffqyin\"}},\"name\":\"onsts\"}],\"staging\":{\"linkedService\":{\"referenceName\":\"xgvelfclduccbird\",\"parameters\":{\"stmninwjizcilng\":\"datawcobie\"}},\"folderPath\":\"datashejjtbxqm\"},\"debugSettings\":{\"sourceSettings\":[{\"sourceName\":\"xqzv\",\"rowLimit\":411885173,\"\":{\"qbsms\":\"dataycucrwnamikzeb\",\"kzruswh\":\"dataziqgfuh\",\"ycjsx\":\"datahczznvf\",\"xqhndvnoamlds\":\"datawwixzvumw\"}},{\"sourceName\":\"aohdjh\",\"rowLimit\":1043529198,\"\":{\"agltsxoa\":\"datakxcoxpelnje\",\"npbs\":\"dataftgz\"}}],\"parameters\":{\"ipgawtxx\":\"datafloccsrmozih\"},\"datasetParameters\":\"datay\"},\"\":{\"pcycilrmcaykg\":\"datacjxgrytf\",\"pndfcpfnznt\":\"datanoxuztrksx\",\"xuzvoamktcqi\":\"datajtwkjaos\",\"rtltla\":\"datasmgbzahgxqdl\"}}") - .toObject(DataFlowDebugPackage.class); - Assertions.assertEquals("ryxynqnzrd", model.sessionId()); - Assertions.assertEquals("vzidsxwaab", model.dataFlow().name()); - Assertions.assertEquals("vwxzn", model.dataFlow().properties().description()); - Assertions.assertEquals("kvntjlrigjkskyri", model.dataFlow().properties().folder().name()); - Assertions.assertEquals("fmvigorqjbttzh", model.dataFlows().get(0).name()); - Assertions.assertEquals("rygznmmaxriz", model.dataFlows().get(0).properties().description()); - Assertions.assertEquals("rojphslhcawjutif", model.dataFlows().get(0).properties().folder().name()); - Assertions.assertEquals("c", model.datasets().get(0).name()); - Assertions.assertEquals("puamwabzxr", model.datasets().get(0).properties().description()); - Assertions.assertEquals("yasflvgsgzwy", - model.datasets().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, - model.datasets().get(0).properties().parameters().get("gtayxonsupeujlz").type()); - Assertions.assertEquals("qihebw", model.datasets().get(0).properties().folder().name()); - Assertions.assertEquals("onsts", model.linkedServices().get(0).name()); 
- Assertions.assertEquals("nktwfansnvpdibmi", - model.linkedServices().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("rpfbcunezz", model.linkedServices().get(0).properties().description()); - Assertions.assertEquals(ParameterType.BOOL, - model.linkedServices().get(0).properties().parameters().get("psihcla").type()); - Assertions.assertEquals("xgvelfclduccbird", model.staging().linkedService().referenceName()); - Assertions.assertEquals("xqzv", model.debugSettings().sourceSettings().get(0).sourceName()); - Assertions.assertEquals(411885173, model.debugSettings().sourceSettings().get(0).rowLimit()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugPackage model = new DataFlowDebugPackage().withSessionId("ryxynqnzrd") - .withDataFlow(new DataFlowDebugResource().withName("vzidsxwaab") - .withProperties(new DataFlow().withDescription("vwxzn") - .withAnnotations(Arrays.asList("dataoeiy", "datab", "databp", "datahv")) - .withFolder(new DataFlowFolder().withName("kvntjlrigjkskyri")))) - .withDataFlows(Arrays.asList( - new DataFlowDebugResource().withName("fmvigorqjbttzh") - .withProperties(new DataFlow().withDescription("rygznmmaxriz") - .withAnnotations(Arrays.asList("databgopxlhslnel", "dataieixynllxe")) - .withFolder(new DataFlowFolder().withName("rojphslhcawjutif"))), - new DataFlowDebugResource().withName("cazuaw") - .withProperties(new DataFlow().withDescription("glka") - .withAnnotations(Arrays.asList("datan", "datajuj", "dataickpz", "datacpopmxel")) - .withFolder(new DataFlowFolder().withName("ltyjedexxmlfmk"))))) - .withDatasets(Arrays.asList(new DatasetDebugResource().withName("c") - .withProperties(new Dataset().withDescription("puamwabzxr") - .withStructure("datacush") - .withSchema("datahaivm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yasflvgsgzwy") - .withParameters(mapOf("knsmjblmljhlnymz", "dataoi"))) - .withParameters(mapOf("gtayxonsupeujlz", - 
new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datayuzcbmqqvxmvw"), - "nzoibgsxgnx", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datacvsql"), - "bxiqxeiiqbimht", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataonmpqoxwdof"), - "qpofvwbc", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datainheh"))) - .withAnnotations(Arrays.asList("datambnkb", "datavqvxk")) - .withFolder(new DatasetFolder().withName("qihebw")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) - .withLinkedServices(Arrays.asList(new LinkedServiceDebugResource().withName("onsts") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("nktwfansnvpdibmi") - .withParameters(mapOf("yls", "datatbzbkiwbuqnyophz"))) - .withDescription("rpfbcunezz") - .withParameters(mapOf("psihcla", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafwyfwlwxjwet"), - "rsqqwztcm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataaylp"), - "jexfdeqvhp", - new ParameterSpecification().withType(ParameterType.ARRAY) - .withDefaultValue("datachcxwaxfewzj"))) - .withAnnotations(Arrays.asList("datakkshkbffm", "datamxzjrgywwpgjx", "datanptfujgi", "datagaao")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withStaging(new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("xgvelfclduccbird") - .withParameters(mapOf("stmninwjizcilng", "datawcobie"))) - .withFolderPath("datashejjtbxqm")) - .withDebugSettings(new DataFlowDebugPackageDebugSettings() - .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("xqzv") - .withRowLimit(411885173) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("aohdjh") - .withRowLimit(1043529198) - 
.withAdditionalProperties(mapOf()))) - .withParameters(mapOf("ipgawtxx", "datafloccsrmozih")) - .withDatasetParameters("datay")) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DataFlowDebugPackage.class); - Assertions.assertEquals("ryxynqnzrd", model.sessionId()); - Assertions.assertEquals("vzidsxwaab", model.dataFlow().name()); - Assertions.assertEquals("vwxzn", model.dataFlow().properties().description()); - Assertions.assertEquals("kvntjlrigjkskyri", model.dataFlow().properties().folder().name()); - Assertions.assertEquals("fmvigorqjbttzh", model.dataFlows().get(0).name()); - Assertions.assertEquals("rygznmmaxriz", model.dataFlows().get(0).properties().description()); - Assertions.assertEquals("rojphslhcawjutif", model.dataFlows().get(0).properties().folder().name()); - Assertions.assertEquals("c", model.datasets().get(0).name()); - Assertions.assertEquals("puamwabzxr", model.datasets().get(0).properties().description()); - Assertions.assertEquals("yasflvgsgzwy", - model.datasets().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, - model.datasets().get(0).properties().parameters().get("gtayxonsupeujlz").type()); - Assertions.assertEquals("qihebw", model.datasets().get(0).properties().folder().name()); - Assertions.assertEquals("onsts", model.linkedServices().get(0).name()); - Assertions.assertEquals("nktwfansnvpdibmi", - model.linkedServices().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("rpfbcunezz", model.linkedServices().get(0).properties().description()); - Assertions.assertEquals(ParameterType.BOOL, - model.linkedServices().get(0).properties().parameters().get("psihcla").type()); - Assertions.assertEquals("xgvelfclduccbird", model.staging().linkedService().referenceName()); - Assertions.assertEquals("xqzv", model.debugSettings().sourceSettings().get(0).sourceName()); - Assertions.assertEquals(411885173, 
model.debugSettings().sourceSettings().get(0).rowLimit()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java deleted file mode 100644 index 820891068d8a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugResourceTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugResource; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"ltzkatbhjmznnb\",\"annotations\":[\"dataeq\",\"datalarvlagunbtg\"],\"folder\":{\"name\":\"wlnbm\"}},\"name\":\"reeudzqavb\"}") - .toObject(DataFlowDebugResource.class); - Assertions.assertEquals("reeudzqavb", model.name()); - Assertions.assertEquals("ltzkatbhjmznnb", model.properties().description()); - Assertions.assertEquals("wlnbm", model.properties().folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowDebugResource model = new DataFlowDebugResource().withName("reeudzqavb") - .withProperties(new DataFlow().withDescription("ltzkatbhjmznnb") - .withAnnotations(Arrays.asList("dataeq", "datalarvlagunbtg")) - .withFolder(new DataFlowFolder().withName("wlnbm"))); - model = BinaryData.fromObject(model).toObject(DataFlowDebugResource.class); - Assertions.assertEquals("reeudzqavb", model.name()); - Assertions.assertEquals("ltzkatbhjmznnb", model.properties().description()); - Assertions.assertEquals("wlnbm", model.properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java deleted file mode 100644 index 
3922c87cc0b2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionInfoInnerTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugSessionInfoInner; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowDebugSessionInfoInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowDebugSessionInfoInner model = BinaryData.fromString( - "{\"dataFlowName\":\"bwefqsfapaqtfer\",\"computeType\":\"wexjkmfxapjwogq\",\"coreCount\":211935178,\"nodeCount\":775989398,\"integrationRuntimeName\":\"dcdab\",\"sessionId\":\"wpwyawbz\",\"startTime\":\"qbucljgkyexaoguy\",\"timeToLiveInMinutes\":166467616,\"lastActivityTime\":\"dsdaultxijjumf\",\"\":{\"nqnm\":\"dataz\",\"qdqx\":\"datajng\",\"zsvtuikzhajqgl\":\"databjwgnyfus\",\"l\":\"datafh\"}}") - .toObject(DataFlowDebugSessionInfoInner.class); - Assertions.assertEquals("bwefqsfapaqtfer", model.dataFlowName()); - Assertions.assertEquals("wexjkmfxapjwogq", model.computeType()); - Assertions.assertEquals(211935178, model.coreCount()); - Assertions.assertEquals(775989398, model.nodeCount()); - Assertions.assertEquals("dcdab", model.integrationRuntimeName()); - Assertions.assertEquals("wpwyawbz", model.sessionId()); - Assertions.assertEquals("qbucljgkyexaoguy", model.startTime()); - Assertions.assertEquals(166467616, model.timeToLiveInMinutes()); - Assertions.assertEquals("dsdaultxijjumf", model.lastActivityTime()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
DataFlowDebugSessionInfoInner model = new DataFlowDebugSessionInfoInner().withDataFlowName("bwefqsfapaqtfer") - .withComputeType("wexjkmfxapjwogq") - .withCoreCount(211935178) - .withNodeCount(775989398) - .withIntegrationRuntimeName("dcdab") - .withSessionId("wpwyawbz") - .withStartTime("qbucljgkyexaoguy") - .withTimeToLiveInMinutes(166467616) - .withLastActivityTime("dsdaultxijjumf") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DataFlowDebugSessionInfoInner.class); - Assertions.assertEquals("bwefqsfapaqtfer", model.dataFlowName()); - Assertions.assertEquals("wexjkmfxapjwogq", model.computeType()); - Assertions.assertEquals(211935178, model.coreCount()); - Assertions.assertEquals(775989398, model.nodeCount()); - Assertions.assertEquals("dcdab", model.integrationRuntimeName()); - Assertions.assertEquals("wpwyawbz", model.sessionId()); - Assertions.assertEquals("qbucljgkyexaoguy", model.startTime()); - Assertions.assertEquals(166467616, model.timeToLiveInMinutes()); - Assertions.assertEquals("dsdaultxijjumf", model.lastActivityTime()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java deleted file mode 100644 index 37a1427e0949..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsAddDataFlowWithResponseMockTests.java +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.AddDataFlowToDebugSessionResponse; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackage; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugResource; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting; -import com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo; -import 
com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetDebugResource; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowDebugSessionsAddDataFlowWithResponseMockTests { - @Test - public void testAddDataFlowWithResponse() throws Exception { - String responseStr = "{\"jobVersion\":\"pbfxvlacllteuksg\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AddDataFlowToDebugSessionResponse response = manager.dataFlowDebugSessions() - .addDataFlowWithResponse("bfbzhczyhtj", "tzlflqpanceow", - new DataFlowDebugPackage().withSessionId("qzxluozmedaqpqu") - .withDataFlow(new DataFlowDebugResource().withName("jzjjrhv") - .withProperties(new DataFlow().withDescription("mruxebslaus") - .withAnnotations(Arrays.asList("datatctiso")) - .withFolder(new DataFlowFolder().withName("fyggbac")))) - .withDataFlows(Arrays.asList( - new 
DataFlowDebugResource().withName("blvpwuqqvmfuuh") - .withProperties(new DataFlow().withDescription("tmbbjil") - .withAnnotations(Arrays.asList("datactykc", "dataksvflurrfnlhlfv")) - .withFolder(new DataFlowFolder().withName("hy"))), - new DataFlowDebugResource().withName("fpemcfhxk") - .withProperties(new DataFlow().withDescription("tshgcrcxam") - .withAnnotations(Arrays.asList("datapzvvrfplkemvvl", "dataezyishi")) - .withFolder(new DataFlowFolder().withName("wplyv"))), - new DataFlowDebugResource().withName("lyaelv") - .withProperties(new DataFlow().withDescription("jvilazop") - .withAnnotations(Arrays.asList("databewbxaufo", "datahmdpggaktugephq", "dataoc")) - .withFolder(new DataFlowFolder().withName("omqwfwtwjzzyiib"))))) - .withDatasets(Arrays.asList( - new DatasetDebugResource().withName("vairau") - .withProperties(new Dataset().withDescription("cta") - .withStructure("datadj") - .withSchema("dataas") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jwtyvvkn") - .withParameters(mapOf("uojnikwzlowusa", "datajybtwgdlfg", "tdf", "datadmjiz"))) - .withParameters(mapOf("jce", new ParameterSpecification().withType(ParameterType.FLOAT), - "ays", new ParameterSpecification().withType(ParameterType.STRING), "pavutiszwycle", - new ParameterSpecification().withType(ParameterType.BOOL))) - .withAnnotations(Arrays.asList("databjmwetfmpzamq", "datacitlyyphtdwh", - "datawxhvspumokmyms", "dataatpvebxesucr")) - .withFolder(new DatasetFolder().withName("qxyowwrbxe")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetDebugResource().withName("xjvsqzchysqy") - .withProperties(new Dataset().withDescription("fbmqgimwi") - .withStructure("dataphd") - .withSchema("datawfajajpojzazzaks") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("twnhpcfsqdzi") - .withParameters(mapOf("sa", "datak", "vinvryxwzxj", "dataevnerpyzuu"))) - .withParameters(mapOf("ei", new ParameterSpecification().withType(ParameterType.BOOL), - 
"ymdxwok", new ParameterSpecification().withType(ParameterType.BOOL), "aihgiglkins", - new ParameterSpecification().withType(ParameterType.BOOL))) - .withAnnotations(Arrays.asList("datagabhpfpvadyx", "datacc", "datahg")) - .withFolder(new DatasetFolder().withName("hzvrqbzlmvwuf")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetDebugResource().withName("corydjsaki") - .withProperties(new Dataset().withDescription("sqmikljc") - .withStructure("dataot") - .withSchema("dataxlpqfxyyws") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rxvjvwkzaqqkq") - .withParameters(mapOf("mgxunldbkuqcnji", "datayzhmfuksqiq"))) - .withParameters( - mapOf("bjkstvbmfnjuzvw", new ParameterSpecification().withType(ParameterType.BOOL), - "wyyyjage", new ParameterSpecification().withType(ParameterType.INT))) - .withAnnotations(Arrays.asList("datahxjwiggca", "datamkoxpay", "datazqgsaegaah")) - .withFolder(new DatasetFolder().withName("rdxhgrg")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetDebugResource().withName("qkkwzbgbwwop") - .withProperties(new Dataset().withDescription("lmiglnqrmqefdq") - .withStructure("datasfebhvkkpd") - .withSchema("datakylioagvijrdyote") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ljdrerzjwe") - .withParameters(mapOf("beuf", "datar", "jsneybpqotfd", "databddcxfuizokzmeg"))) - .withParameters(mapOf("s", new ParameterSpecification().withType(ParameterType.INT))) - .withAnnotations(Arrays.asList("datauptre", "datalauupwt", "datatpbi")) - .withFolder(new DatasetFolder().withName("segcogyctekaa")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) - .withLinkedServices( - Arrays - .asList( - new LinkedServiceDebugResource().withName("v") - .withProperties(new LinkedService() - .withConnectVia( - new IntegrationRuntimeReference().withReferenceName("sekrzcnlqstmik") - .withParameters(mapOf("k", "datatwvh"))) - .withDescription("arqtkzeopoxd") - .withParameters( - 
mapOf("nqt", new ParameterSpecification().withType(ParameterType.STRING))) - .withAnnotations(Arrays.asList("datawfjrtxfshaqpml", "datazwgotlbflbax")) - .withAdditionalProperties(mapOf("type", "LinkedService"))), - new LinkedServiceDebugResource().withName("lxqjshyyrc") - .withProperties(new LinkedService() - .withConnectVia( - new IntegrationRuntimeReference().withReferenceName("mdrbmff") - .withParameters(mapOf("nplzbzc", "datayykwwhscubgwzm", "ogjoocnseo", - "datazhdrvkzzvh"))) - .withDescription("ktqrvzaabeiqo") - .withParameters(mapOf( - "glgxvqdlw", new ParameterSpecification().withType(ParameterType.BOOL), - "kzkhbiee", new ParameterSpecification().withType(ParameterType.BOOL), "bp", - new ParameterSpecification().withType(ParameterType.ARRAY), "jteprqot", - new ParameterSpecification().withType(ParameterType.ARRAY))) - .withAnnotations( - Arrays.asList("dataapmt", "datafgswpqunvxtvmb", "dataydqoqpqyj")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withStaging(new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("z") - .withParameters(mapOf("kgwf", "dataaurmq", "xhmrhhxlibd", "datalt"))) - .withFolderPath("dataeamslvpxsywn")) - .withDebugSettings(new DataFlowDebugPackageDebugSettings() - .withSourceSettings(Arrays.asList( - new DataFlowSourceSetting().withSourceName("dyaauls") - .withRowLimit(2051357393) - .withAdditionalProperties(mapOf()), - new DataFlowSourceSetting().withSourceName("bhkyas") - .withRowLimit(146099985) - .withAdditionalProperties(mapOf()))) - .withParameters(mapOf("weqbeygnetuvsqv", "datau", "znblkofdmlrtlhp", "datajvum", "wqdjzbogtbyk", - "dataucfiwaklf")) - .withDatasetParameters("dataqxxy")) - .withAdditionalProperties(mapOf()), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("pbfxvlacllteuksg", response.jobVersion()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java deleted file mode 100644 index 6c741ca4186f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsCreateMockTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest; -import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionResponse; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowDebugSessionsCreateMockTests { - @Test - public void testCreate() throws Exception { - 
String responseStr = "{\"status\":\"mcjpjrxvsgg\",\"sessionId\":\"brkrfvlqwije\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - CreateDataFlowDebugSessionResponse response = manager.dataFlowDebugSessions() - .create("yortbresx", "cpbtvgiokz", - new CreateDataFlowDebugSessionRequest().withComputeType("pvswd") - .withCoreCount(377588877) - .withTimeToLive(1834160720) - .withIntegrationRuntime(new IntegrationRuntimeDebugResource().withName("hqxhtcohrhwxv") - .withProperties(new IntegrationRuntime().withDescription("ussgjmubg") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime")))), - com.azure.core.util.Context.NONE); - - Assertions.assertEquals("mcjpjrxvsgg", response.status()); - Assertions.assertEquals("brkrfvlqwije", response.sessionId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java deleted file mode 100644 index aaeef898ce09..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowDebugSessionsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - 
.authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.dataFlowDebugSessions() - .deleteWithResponse("ccotgqgevie", "yhowvnzwhypj", - new DeleteDataFlowDebugSessionRequest().withSessionId("palpt"), com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java deleted file mode 100644 index 5dca62680006..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsExecuteCommandMockTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandRequest; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandResponse; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowDebugSessionsExecuteCommandMockTests { - @Test - public void testExecuteCommand() throws Exception { - String responseStr = "{\"status\":\"klnrzoafxoyddush\",\"data\":\"jhh\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DataFlowDebugCommandResponse response = manager.dataFlowDebugSessions() - .executeCommand("psbdcheydctsqxw", "sszdw", - new DataFlowDebugCommandRequest().withSessionId("gbzmcprtanag") - .withCommand(DataFlowDebugCommandType.EXECUTE_EXPRESSION_QUERY) - .withCommandPayload(new DataFlowDebugCommandPayload().withStreamName("hwkaatjssebyd") - .withRowLimits(2122847597) - .withColumns(Arrays.asList("lpiccx")) - .withExpression("twstqgc")), - com.azure.core.util.Context.NONE); - - 
Assertions.assertEquals("klnrzoafxoyddush", response.status()); - Assertions.assertEquals("jhh", response.data()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java deleted file mode 100644 index 5314c0f74b3f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowDebugSessionsQueryByFactoryMockTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DataFlowDebugSessionInfo; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowDebugSessionsQueryByFactoryMockTests { - @Test - public void testQueryByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"dataFlowName\":\"ccpg\",\"computeType\":\"ixxxgltqldlh\",\"coreCount\":911012397,\"nodeCount\":209988622,\"integrationRuntimeName\":\"dam\",\"sessionId\":\"ajh\",\"startTime\":\"bpsnogyvpfyjlfn\",\"timeToLiveInMinutes\":1285291253,\"lastActivityTime\":\"toqhyprpwkvz\",\"\":{\"o\":\"datadlhc\",\"jqz\":\"datakmpxtfcrugitjnw\",\"eaqnbkcqoyqmbu\":\"datagq\"}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.dataFlowDebugSessions() - .queryByFactory("enpihtgi", "aeeqgpviroz", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("ccpg", response.iterator().next().dataFlowName()); - Assertions.assertEquals("ixxxgltqldlh", response.iterator().next().computeType()); - Assertions.assertEquals(911012397, response.iterator().next().coreCount()); - Assertions.assertEquals(209988622, response.iterator().next().nodeCount()); - Assertions.assertEquals("dam", response.iterator().next().integrationRuntimeName()); - Assertions.assertEquals("ajh", response.iterator().next().sessionId()); - Assertions.assertEquals("bpsnogyvpfyjlfn", response.iterator().next().startTime()); - Assertions.assertEquals(1285291253, response.iterator().next().timeToLiveInMinutes()); - Assertions.assertEquals("toqhyprpwkvz", response.iterator().next().lastActivityTime()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java deleted file mode 100644 index 
75447220b605..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowFolderTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowFolderTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowFolder model = BinaryData.fromString("{\"name\":\"nmfpp\"}").toObject(DataFlowFolder.class); - Assertions.assertEquals("nmfpp", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowFolder model = new DataFlowFolder().withName("nmfpp"); - model = BinaryData.fromObject(model).toObject(DataFlowFolder.class); - Assertions.assertEquals("nmfpp", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java deleted file mode 100644 index 5c3eccf86f3a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowListResponseTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataFlowResourceInner; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowListResponse; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"eevy\",\"annotations\":[\"datasgzfczbg\"],\"folder\":{\"name\":\"gbeglqgleo\"}},\"name\":\"betnluankrrfxee\",\"type\":\"tijv\",\"etag\":\"vbmqzbqq\",\"id\":\"aj\"},{\"properties\":{\"type\":\"DataFlow\",\"description\":\"wxacevehj\",\"annotations\":[\"dataxoafgaoqltfae\",\"datalinmfgv\"],\"folder\":{\"name\":\"pghriypoqeyhl\"}},\"name\":\"ykprlpyznu\",\"type\":\"qdsmexiit\",\"etag\":\"uxtyasiibmi\",\"id\":\"nnust\"}],\"nextLink\":\"ljhnmgixhcmav\"}") - .toObject(DataFlowListResponse.class); - Assertions.assertEquals("aj", model.value().get(0).id()); - Assertions.assertEquals("eevy", model.value().get(0).properties().description()); - Assertions.assertEquals("gbeglqgleo", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("ljhnmgixhcmav", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowListResponse model = new DataFlowListResponse().withValue(Arrays.asList( - new DataFlowResourceInner().withId("aj") - .withProperties(new DataFlow().withDescription("eevy") - .withAnnotations(Arrays.asList("datasgzfczbg")) - .withFolder(new DataFlowFolder().withName("gbeglqgleo"))), - new DataFlowResourceInner().withId("nnust") - .withProperties(new DataFlow().withDescription("wxacevehj") - 
.withAnnotations(Arrays.asList("dataxoafgaoqltfae", "datalinmfgv")) - .withFolder(new DataFlowFolder().withName("pghriypoqeyhl"))))) - .withNextLink("ljhnmgixhcmav"); - model = BinaryData.fromObject(model).toObject(DataFlowListResponse.class); - Assertions.assertEquals("aj", model.value().get(0).id()); - Assertions.assertEquals("eevy", model.value().get(0).properties().description()); - Assertions.assertEquals("gbeglqgleo", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("ljhnmgixhcmav", model.nextLink()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java deleted file mode 100644 index fc4984eb7b82..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowReferenceTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowReference model = BinaryData.fromString( - "{\"type\":\"DataFlowReference\",\"referenceName\":\"otz\",\"datasetParameters\":\"datad\",\"parameters\":{\"qiuasigrows\":\"datalgry\",\"equygdjboqgrmtq\":\"datac\",\"uawvcmjzk\":\"datakqevadrmmw\",\"zugamxzkrrcoiis\":\"dataiidisczskoswoqiq\"},\"\":{\"u\":\"datanppcce\",\"f\":\"datatdsbezax\",\"y\":\"datakzxuiz\"}}") - .toObject(DataFlowReference.class); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.type()); - Assertions.assertEquals("otz", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowReference model = new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("otz") - .withDatasetParameters("datad") - .withParameters(mapOf("qiuasigrows", "datalgry", "equygdjboqgrmtq", "datac", "uawvcmjzk", "datakqevadrmmw", - "zugamxzkrrcoiis", "dataiidisczskoswoqiq")) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DataFlowReference.class); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.type()); - Assertions.assertEquals("otz", model.referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java deleted file mode 100644 index 88401f32a0b4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowResourceInnerTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataFlowResourceInner; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"qspkcdqzhlctd\",\"annotations\":[\"dataqn\",\"datayfp\",\"datahrqbnjjrcg\",\"datagydcw\"],\"folder\":{\"name\":\"jumvqqolihrraio\"}},\"name\":\"ubrjtl\",\"type\":\"xfuojrn\",\"etag\":\"flrzpas\",\"id\":\"biuimzdlyjdfq\"}") - .toObject(DataFlowResourceInner.class); - Assertions.assertEquals("biuimzdlyjdfq", model.id()); - Assertions.assertEquals("qspkcdqzhlctd", model.properties().description()); - Assertions.assertEquals("jumvqqolihrraio", 
model.properties().folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowResourceInner model = new DataFlowResourceInner().withId("biuimzdlyjdfq") - .withProperties(new DataFlow().withDescription("qspkcdqzhlctd") - .withAnnotations(Arrays.asList("dataqn", "datayfp", "datahrqbnjjrcg", "datagydcw")) - .withFolder(new DataFlowFolder().withName("jumvqqolihrraio"))); - model = BinaryData.fromObject(model).toObject(DataFlowResourceInner.class); - Assertions.assertEquals("biuimzdlyjdfq", model.id()); - Assertions.assertEquals("qspkcdqzhlctd", model.properties().description()); - Assertions.assertEquals("jumvqqolihrraio", model.properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java deleted file mode 100644 index 2f3fbabf8fd8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSinkTests.java +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSink; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowSink model = BinaryData.fromString( - "{\"schemaLinkedService\":{\"referenceName\":\"pp\",\"parameters\":{\"dcwmqsyrilmhxdqa\":\"dataohoqkpjtnqjilayw\",\"tmfwobbjwhl\":\"datalfylnkkbjpjvlyw\",\"nqzocrdzg\":\"datayj\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"euntgxdncaqtti\",\"parameters\":{\"uvny\":\"datai\",\"hlqtxnrflkn\":\"datatzgixgyrihlgmgb\",\"hipx\":\"datarndpgfjodhdaqotw\",\"uumljcirvpefyc\":\"datagsabvcipowzafc\"}},\"name\":\"veitit\",\"description\":\"s\",\"dataset\":{\"referenceName\":\"ajlnsjhwjuyxx\",\"parameters\":{\"aytuadxkxeqbwp\":\"datavmv\",\"xsl\":\"datatghyksarcdr\",\"x\":\"datavlzladl\",\"wzdanojisgglmvo\":\"datapbqhvfdqqjwkr\"}},\"linkedService\":{\"referenceName\":\"tuz\",\"parameters\":{\"kaehxsmzyg\":\"datatibpv\",\"eivmak\":\"datafwakw\",\"tfjmskdchmaiub\":\"datahysowljuxlkbect\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"zwpvgmfa\",\"datasetParameters\":\"datazazmg\",\"parameters\":{\"zrthqet\":\"datadgjqafkmkro\"},\"\":{\"iezeagm\":\"datartvaoznqni\",\"lzmb\":\"dataeituugedhfpjs\",\"fsyrledjc\":\"datasyjdeolctae\"}}}") - .toObject(DataFlowSink.class); - Assertions.assertEquals("veitit", model.name()); - Assertions.assertEquals("s", model.description()); - Assertions.assertEquals("ajlnsjhwjuyxx", 
model.dataset().referenceName()); - Assertions.assertEquals("tuz", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("zwpvgmfa", model.flowlet().referenceName()); - Assertions.assertEquals("pp", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("euntgxdncaqtti", model.rejectedDataLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowSink model - = new DataFlowSink().withName("veitit") - .withDescription("s") - .withDataset(new DatasetReference().withReferenceName("ajlnsjhwjuyxx") - .withParameters(mapOf("aytuadxkxeqbwp", "datavmv", "xsl", "datatghyksarcdr", "x", "datavlzladl", - "wzdanojisgglmvo", "datapbqhvfdqqjwkr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("tuz") - .withParameters(mapOf("kaehxsmzyg", "datatibpv", "eivmak", "datafwakw", "tfjmskdchmaiub", - "datahysowljuxlkbect"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("zwpvgmfa") - .withDatasetParameters("datazazmg") - .withParameters(mapOf("zrthqet", "datadgjqafkmkro")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("pp") - .withParameters(mapOf("dcwmqsyrilmhxdqa", "dataohoqkpjtnqjilayw", "tmfwobbjwhl", - "datalfylnkkbjpjvlyw", "nqzocrdzg", "datayj"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("euntgxdncaqtti") - .withParameters(mapOf("uvny", "datai", "hlqtxnrflkn", "datatzgixgyrihlgmgb", "hipx", - "datarndpgfjodhdaqotw", "uumljcirvpefyc", "datagsabvcipowzafc"))); - model = BinaryData.fromObject(model).toObject(DataFlowSink.class); - Assertions.assertEquals("veitit", model.name()); - Assertions.assertEquals("s", model.description()); - Assertions.assertEquals("ajlnsjhwjuyxx", 
model.dataset().referenceName()); - Assertions.assertEquals("tuz", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("zwpvgmfa", model.flowlet().referenceName()); - Assertions.assertEquals("pp", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("euntgxdncaqtti", model.rejectedDataLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java deleted file mode 100644 index 79e2e4076b58..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceSettingTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowSourceSettingTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowSourceSetting model = BinaryData.fromString( - "{\"sourceName\":\"oxgjiuqhibt\",\"rowLimit\":645359051,\"\":{\"ktvqylkmqpzoy\":\"datawjedmurrxxgew\"}}") - .toObject(DataFlowSourceSetting.class); - Assertions.assertEquals("oxgjiuqhibt", model.sourceName()); - Assertions.assertEquals(645359051, model.rowLimit()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowSourceSetting model = new DataFlowSourceSetting().withSourceName("oxgjiuqhibt") - .withRowLimit(645359051) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DataFlowSourceSetting.class); - Assertions.assertEquals("oxgjiuqhibt", model.sourceName()); - Assertions.assertEquals(645359051, model.rowLimit()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java deleted file mode 100644 index 32bdab0e0b7c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowSourceTests.java +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowSource model = BinaryData.fromString( - 
"{\"schemaLinkedService\":{\"referenceName\":\"pbninjgazlsvbzfc\",\"parameters\":{\"hsy\":\"dataeedwjccik\"}},\"name\":\"krdre\",\"description\":\"olr\",\"dataset\":{\"referenceName\":\"ehqbeivdlhydwbdb\",\"parameters\":{\"ytjlkesmmpath\":\"datalpu\",\"llbvgwzsfftedous\":\"databtahdeanii\",\"aqo\":\"dataktjtgra\",\"btxxwpfhnjzud\":\"datafkbebauzl\"}},\"linkedService\":{\"referenceName\":\"pzkg\",\"parameters\":{\"ygisrz\":\"dataoywhczzqrhmngqbe\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"kdidjc\",\"datasetParameters\":\"datarmpwctofld\",\"parameters\":{\"xkbrfg\":\"datacdhz\",\"fj\":\"datarwjiyew\",\"xet\":\"datarwq\"},\"\":{\"ycnawthvmaxgnuy\":\"datawvrrmdqn\"}}}") - .toObject(DataFlowSource.class); - Assertions.assertEquals("krdre", model.name()); - Assertions.assertEquals("olr", model.description()); - Assertions.assertEquals("ehqbeivdlhydwbdb", model.dataset().referenceName()); - Assertions.assertEquals("pzkg", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("kdidjc", model.flowlet().referenceName()); - Assertions.assertEquals("pbninjgazlsvbzfc", model.schemaLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowSource model = new DataFlowSource().withName("krdre") - .withDescription("olr") - .withDataset(new DatasetReference().withReferenceName("ehqbeivdlhydwbdb") - .withParameters(mapOf("ytjlkesmmpath", "datalpu", "llbvgwzsfftedous", "databtahdeanii", "aqo", - "dataktjtgra", "btxxwpfhnjzud", "datafkbebauzl"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pzkg") - .withParameters(mapOf("ygisrz", "dataoywhczzqrhmngqbe"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("kdidjc") - .withDatasetParameters("datarmpwctofld") - .withParameters(mapOf("xkbrfg", "datacdhz", 
"fj", "datarwjiyew", "xet", "datarwq")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("pbninjgazlsvbzfc") - .withParameters(mapOf("hsy", "dataeedwjccik"))); - model = BinaryData.fromObject(model).toObject(DataFlowSource.class); - Assertions.assertEquals("krdre", model.name()); - Assertions.assertEquals("olr", model.description()); - Assertions.assertEquals("ehqbeivdlhydwbdb", model.dataset().referenceName()); - Assertions.assertEquals("pzkg", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("kdidjc", model.flowlet().referenceName()); - Assertions.assertEquals("pbninjgazlsvbzfc", model.schemaLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java deleted file mode 100644 index 051b1cbb7a53..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowStagingInfoTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowStagingInfo; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowStagingInfoTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlowStagingInfo model = BinaryData.fromString( - "{\"linkedService\":{\"referenceName\":\"cpzgpxtiv\",\"parameters\":{\"n\":\"datanidibgqjxg\",\"kqmhhaowjr\":\"datahgovfgp\"}},\"folderPath\":\"datavuporqzdfuydzv\"}") - .toObject(DataFlowStagingInfo.class); - Assertions.assertEquals("cpzgpxtiv", model.linkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlowStagingInfo model = new DataFlowStagingInfo() - .withLinkedService(new LinkedServiceReference().withReferenceName("cpzgpxtiv") - .withParameters(mapOf("n", "datanidibgqjxg", "kqmhhaowjr", "datahgovfgp"))) - .withFolderPath("datavuporqzdfuydzv"); - model = BinaryData.fromObject(model).toObject(DataFlowStagingInfo.class); - Assertions.assertEquals("cpzgpxtiv", model.linkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java deleted file mode 100644 index 4e6fa7575f3d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataFlowTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataFlow model = BinaryData.fromString( - "{\"type\":\"DataFlow\",\"description\":\"kyoqufdv\",\"annotations\":[\"dataslzojh\",\"datactfnmdxotng\"],\"folder\":{\"name\":\"ugeyzihgrkyuiza\"}}") - .toObject(DataFlow.class); - Assertions.assertEquals("kyoqufdv", model.description()); - Assertions.assertEquals("ugeyzihgrkyuiza", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataFlow model = new DataFlow().withDescription("kyoqufdv") - .withAnnotations(Arrays.asList("dataslzojh", "datactfnmdxotng")) - .withFolder(new DataFlowFolder().withName("ugeyzihgrkyuiza")); - model = BinaryData.fromObject(model).toObject(DataFlow.class); - 
Assertions.assertEquals("kyoqufdv", model.description()); - Assertions.assertEquals("ugeyzihgrkyuiza", model.folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 7c018711f382..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DataFlow; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowsCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"DataFlow\",\"description\":\"sxxra\",\"annotations\":[\"dataicgq\",\"dataafk\",\"datats\",\"dataeagvqctcrdfxqh\"],\"folder\":{\"name\":\"ujrnfdqlzggvo\"}},\"name\":\"iiumrdb\",\"type\":\"jyijqciaznp\",\"etag\":\"gupqwqshmntlbfkn\",\"id\":\"csuvjbf\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DataFlowResource response = manager.dataFlows() - .define("iacwdxvlku") - .withExistingFactory("wumrfjj", "ptsjecqwdosbsng") - .withProperties(new DataFlow().withDescription("pwrgryrb") - .withAnnotations(Arrays.asList("dataeqcdikcqcvz", "datatfth", "datajxid", "datazmmpy")) - .withFolder(new DataFlowFolder().withName("xtccmqzkuqr"))) - .withIfMatch("tlh") - .create(); - - Assertions.assertEquals("csuvjbf", response.id()); - Assertions.assertEquals("sxxra", response.properties().description()); - Assertions.assertEquals("ujrnfdqlzggvo", response.properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java deleted file mode 100644 index 5c94248de7a4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.dataFlows().deleteWithResponse("ro", "ygtetmpw", "lbqdxvxdfkdwk", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java deleted file mode 100644 index 526ccc7098f4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsGetWithResponseMockTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"DataFlow\",\"description\":\"ltrztr\",\"annotations\":[\"datarjvr\",\"datayr\",\"datahfrsyckqwefmq\"],\"folder\":{\"name\":\"zlvfncphhlnba\"}},\"name\":\"frzgeobzmxz\",\"type\":\"ihlzgrojpnxzjrcc\",\"etag\":\"syceykvmlxhym\",\"id\":\"ghppy\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DataFlowResource response = manager.dataFlows() - .getWithResponse("oetetiskqxeclwl", "odrdnfmxomupdqp", "xivktdvwmefjpo", "llyvbvx", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("ghppy", response.id()); - Assertions.assertEquals("ltrztr", response.properties().description()); - Assertions.assertEquals("zlvfncphhlnba", response.properties().folder().name()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java deleted file mode 100644 index c568eb2ead3c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataFlowsListByFactoryMockTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DataFlowResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DataFlowsListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"DataFlow\",\"description\":\"fdjxltjsmd\",\"annotations\":[\"datarrbkmzqfisggoap\",\"datadmxwe\"],\"folder\":{\"name\":\"ixekqlyctdx\"}},\"name\":\"gqffzedfbfqwllz\",\"type\":\"qzj\",\"etag\":\"odcs\",\"id\":\"bk\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - 
.withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.dataFlows().listByFactory("mnoecfjw", "kiupgmdsz", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("bk", response.iterator().next().id()); - Assertions.assertEquals("fdjxltjsmd", response.iterator().next().properties().description()); - Assertions.assertEquals("ixekqlyctdx", response.iterator().next().properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java deleted file mode 100644 index 23139606d1e5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTests.java +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DataLakeAnalyticsUsqlActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataLakeAnalyticsUsqlActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataLakeAnalyticsUsqlActivity model = BinaryData.fromString( - 
"{\"type\":\"DataLakeAnalyticsU-SQL\",\"typeProperties\":{\"scriptPath\":\"datandpkevtofvqjrd\",\"scriptLinkedService\":{\"referenceName\":\"dzqeuppxdzpjew\",\"parameters\":{\"xlyntfgbx\":\"datayszwkrkokgr\",\"tz\":\"dataa\"}},\"degreeOfParallelism\":\"datauwkkwtoxlxnpvea\",\"priority\":\"datad\",\"parameters\":{\"eeakzysak\":\"datatxronbznk\",\"ch\":\"dataxajtajdf\",\"pcmyqdrrkfhlayg\":\"datawkd\"},\"runtimeVersion\":\"dataore\",\"compilationMode\":\"dataoruiy\"},\"linkedServiceName\":{\"referenceName\":\"ourqdmz\",\"parameters\":{\"p\":\"datarsppucxig\",\"rjlal\":\"datavtbl\",\"aldtel\":\"datagg\"}},\"policy\":{\"timeout\":\"datafumezczhiradklzg\",\"retry\":\"datammapxnoogmf\",\"retryIntervalInSeconds\":85348138,\"secureInput\":false,\"secureOutput\":true,\"\":{\"jymumpydk\":\"dataezexwzpgywn\"}},\"name\":\"bcufhkrvxxzhqouo\",\"description\":\"czcks\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"fkgd\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"iybotuqzjfk\":\"dataxhsqdotbnfb\"}}],\"userProperties\":[{\"name\":\"terbsgwo\",\"value\":\"datakcvwqyfixw\"},{\"name\":\"q\",\"value\":\"dataxmiw\"},{\"name\":\"z\",\"value\":\"datahilypuxbnvquxut\"},{\"name\":\"wbsttmvaijnzq\",\"value\":\"dataqwkaevbgjhmy\"}],\"\":{\"otmir\":\"dataovmtidmycyyajl\"}}") - .toObject(DataLakeAnalyticsUsqlActivity.class); - Assertions.assertEquals("bcufhkrvxxzhqouo", model.name()); - Assertions.assertEquals("czcks", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("fkgd", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("terbsgwo", model.userProperties().get(0).name()); - Assertions.assertEquals("ourqdmz", 
model.linkedServiceName().referenceName()); - Assertions.assertEquals(85348138, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("dzqeuppxdzpjew", model.scriptLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataLakeAnalyticsUsqlActivity model = new DataLakeAnalyticsUsqlActivity().withName("bcufhkrvxxzhqouo") - .withDescription("czcks") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("fkgd") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("terbsgwo").withValue("datakcvwqyfixw"), - new UserProperty().withName("q").withValue("dataxmiw"), - new UserProperty().withName("z").withValue("datahilypuxbnvquxut"), - new UserProperty().withName("wbsttmvaijnzq").withValue("dataqwkaevbgjhmy"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ourqdmz") - .withParameters(mapOf("p", "datarsppucxig", "rjlal", "datavtbl", "aldtel", "datagg"))) - .withPolicy(new ActivityPolicy().withTimeout("datafumezczhiradklzg") - .withRetry("datammapxnoogmf") - .withRetryIntervalInSeconds(85348138) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withScriptPath("datandpkevtofvqjrd") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("dzqeuppxdzpjew") - .withParameters(mapOf("xlyntfgbx", "datayszwkrkokgr", "tz", "dataa"))) - .withDegreeOfParallelism("datauwkkwtoxlxnpvea") - .withPriority("datad") - .withParameters(mapOf("eeakzysak", "datatxronbznk", "ch", 
"dataxajtajdf", "pcmyqdrrkfhlayg", "datawkd")) - .withRuntimeVersion("dataore") - .withCompilationMode("dataoruiy"); - model = BinaryData.fromObject(model).toObject(DataLakeAnalyticsUsqlActivity.class); - Assertions.assertEquals("bcufhkrvxxzhqouo", model.name()); - Assertions.assertEquals("czcks", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("fkgd", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("terbsgwo", model.userProperties().get(0).name()); - Assertions.assertEquals("ourqdmz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(85348138, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("dzqeuppxdzpjew", model.scriptLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java deleted file mode 100644 index 0c952d3256d3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataLakeAnalyticsUsqlActivityTypePropertiesTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataLakeAnalyticsUsqlActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DataLakeAnalyticsUsqlActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataLakeAnalyticsUsqlActivityTypeProperties model = BinaryData.fromString( - "{\"scriptPath\":\"dataipnclnbfxme\",\"scriptLinkedService\":{\"referenceName\":\"y\",\"parameters\":{\"bqbwbw\":\"datafrfzghnjaqzdzkyq\"}},\"degreeOfParallelism\":\"datawmmvb\",\"priority\":\"dataftkceyjt\",\"parameters\":{\"pzrcq\":\"dataoxtlqytxft\",\"phkmwbtrqklondbv\":\"datasjqrgtapawp\",\"rbjtjvqdwz\":\"dataqtpebaawzsxp\"},\"runtimeVersion\":\"dataxdgt\",\"compilationMode\":\"dataieqlikyctunfukeh\"}") - 
.toObject(DataLakeAnalyticsUsqlActivityTypeProperties.class); - Assertions.assertEquals("y", model.scriptLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataLakeAnalyticsUsqlActivityTypeProperties model - = new DataLakeAnalyticsUsqlActivityTypeProperties().withScriptPath("dataipnclnbfxme") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("y") - .withParameters(mapOf("bqbwbw", "datafrfzghnjaqzdzkyq"))) - .withDegreeOfParallelism("datawmmvb") - .withPriority("dataftkceyjt") - .withParameters(mapOf("pzrcq", "dataoxtlqytxft", "phkmwbtrqklondbv", "datasjqrgtapawp", "rbjtjvqdwz", - "dataqtpebaawzsxp")) - .withRuntimeVersion("dataxdgt") - .withCompilationMode("dataieqlikyctunfukeh"); - model = BinaryData.fromObject(model).toObject(DataLakeAnalyticsUsqlActivityTypeProperties.class); - Assertions.assertEquals("y", model.scriptLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java deleted file mode 100644 index 2500d8f65f5a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DataMapperMappingTests.java +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMappings; -import com.azure.resourcemanager.datafactory.models.MapperAttributeReference; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MappingType; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class DataMapperMappingTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DataMapperMapping model = BinaryData.fromString( - "{\"targetEntityName\":\"huxiqhzlraymez\",\"sourceEntityName\":\"skihmxrfd\",\"sourceConnectionReference\":{\"connectionName\":\"rednw\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"gmevuafpwzy\",\"type\":\"Aggregate\",\"functionName\":\"gwltxeqip\",\"expression\":\"zdyi\",\"attributeReference\":{\"name\":\"ayorprav\",\"entity\":\"oge\",\"entityConnectionReference\":{\"connectionName\":\"bnsmjkwynqxaek\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"jtqpkevmyltjcrsp\",\"entity\":\"l\",\"entityConnectionReference\":{}},{\"name\":\"clf\",\"entity\":\"annnoytzposewx\",\"entityConnectionReference\":{}},{\"name\":\"xvkqm\",\"entity\":\"pxvpifdfaif\",\"entityConnectionReference\":{}},{\"name\":\"zeyuubeidszl\",\"entity\":\"toi\",\"entityConnectionReference\":{}}]}]},\"sourceDenormalizeInfo\":\"dataygvfltgvdihoyn\"}") - .toObject(DataMapperMapping.class); - Assertions.assertEquals("huxiqhzlraymez", model.targetEntityName()); - Assertions.assertEquals("skihmxrfd", model.sourceEntityName()); - Assertions.assertEquals("rednw", 
model.sourceConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionReference().type()); - Assertions.assertEquals("gmevuafpwzy", model.attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("gwltxeqip", model.attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("zdyi", model.attributeMappingInfo().attributeMappings().get(0).expression()); - Assertions.assertEquals("ayorprav", - model.attributeMappingInfo().attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("oge", - model.attributeMappingInfo().attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("bnsmjkwynqxaek", - model.attributeMappingInfo() - .attributeMappings() - .get(0) - .attributeReference() - .entityConnectionReference() - .connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappingInfo() - .attributeMappings() - .get(0) - .attributeReference() - .entityConnectionReference() - .type()); - Assertions.assertEquals("jtqpkevmyltjcrsp", - model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("l", - model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).entity()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DataMapperMapping model = new DataMapperMapping().withTargetEntityName("huxiqhzlraymez") - .withSourceEntityName("skihmxrfd") - .withSourceConnectionReference( - new MapperConnectionReference().withConnectionName("rednw").withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo(new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList(new MapperAttributeMapping().withName("gmevuafpwzy") - 
.withType(MappingType.AGGREGATE) - .withFunctionName("gwltxeqip") - .withExpression("zdyi") - .withAttributeReference(new MapperAttributeReference().withName("ayorprav") - .withEntity("oge") - .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("bnsmjkwynqxaek") - .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("jtqpkevmyltjcrsp") - .withEntity("l") - .withEntityConnectionReference(new MapperConnectionReference()), - new MapperAttributeReference().withName("clf") - .withEntity("annnoytzposewx") - .withEntityConnectionReference(new MapperConnectionReference()), - new MapperAttributeReference().withName("xvkqm") - .withEntity("pxvpifdfaif") - .withEntityConnectionReference(new MapperConnectionReference()), - new MapperAttributeReference().withName("zeyuubeidszl") - .withEntity("toi") - .withEntityConnectionReference(new MapperConnectionReference())))))) - .withSourceDenormalizeInfo("dataygvfltgvdihoyn"); - model = BinaryData.fromObject(model).toObject(DataMapperMapping.class); - Assertions.assertEquals("huxiqhzlraymez", model.targetEntityName()); - Assertions.assertEquals("skihmxrfd", model.sourceEntityName()); - Assertions.assertEquals("rednw", model.sourceConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.sourceConnectionReference().type()); - Assertions.assertEquals("gmevuafpwzy", model.attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, model.attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("gwltxeqip", model.attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("zdyi", model.attributeMappingInfo().attributeMappings().get(0).expression()); - Assertions.assertEquals("ayorprav", - model.attributeMappingInfo().attributeMappings().get(0).attributeReference().name()); - 
Assertions.assertEquals("oge", - model.attributeMappingInfo().attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("bnsmjkwynqxaek", - model.attributeMappingInfo() - .attributeMappings() - .get(0) - .attributeReference() - .entityConnectionReference() - .connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappingInfo() - .attributeMappings() - .get(0) - .attributeReference() - .entityConnectionReference() - .type()); - Assertions.assertEquals("jtqpkevmyltjcrsp", - model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("l", - model.attributeMappingInfo().attributeMappings().get(0).attributeReferences().get(0).entity()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java deleted file mode 100644 index a530d6c5b0cb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTests.java +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatabricksNotebookActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatabricksNotebookActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksNotebookActivity model = BinaryData.fromString( - 
"{\"type\":\"DatabricksNotebook\",\"typeProperties\":{\"notebookPath\":\"dataidcytnzy\",\"baseParameters\":{\"nt\":\"datadwgqisrlhxfmvngd\",\"hnh\":\"datavn\"},\"libraries\":[{\"j\":\"databorjyprcojwiigt\",\"iftm\":\"dataczoqpkpib\",\"hlnaymsgbyho\":\"datazofont\",\"ennobjixoqqjbsag\":\"dataqugycorgnxmn\"},{\"r\":\"dataeoefwnjsorhpga\",\"hwxu\":\"datamwlpa\",\"s\":\"dataaktnmwlklqhw\"},{\"lsuiyvbildw\":\"datakoezcabf\",\"nvjqeigpylpmtw\":\"datal\"}]},\"linkedServiceName\":{\"referenceName\":\"dt\",\"parameters\":{\"indyurwzrxkha\":\"datavaxsw\"}},\"policy\":{\"timeout\":\"datajambtvc\",\"retry\":\"datalpbvponxhszrot\",\"retryIntervalInSeconds\":1589919162,\"secureInput\":true,\"secureOutput\":true,\"\":{\"nbzikl\":\"dataao\",\"zvvkehasxjmf\":\"datayzrtawjk\"}},\"name\":\"bs\",\"description\":\"xwcimamtqfrdfoiq\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"nrj\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Failed\"],\"\":{\"kmlfcgk\":\"dataytjmlroxvsclmt\",\"bestyy\":\"dataeitphzuaznsbvu\",\"impuwgrny\":\"dataljo\",\"awzjhfauub\":\"datarizsekfudranmdcf\"}},{\"activity\":\"vnaf\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"grufbzgnrjfzba\":\"datacmviclhommhaxt\"}},{\"activity\":\"qmmkmqdfjeu\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"vywolccxd\":\"dataskzplbzyju\",\"spnzqohhhexgxn\":\"datatkhehbosaxge\",\"vfottycfo\":\"datafodxiy\",\"sjxurrhpihtxgjzi\":\"datatzdxbspglcbhah\"}},{\"activity\":\"yhujgrbjmzagxjoi\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"d\":\"datacrf\",\"jzquwjgfihlo\":\"datatcf\",\"efxvggkjbhs\":\"dataauorzbk\",\"yajijzrt\":\"datayy\"}}],\"userProperties\":[{\"name\":\"gonhmblkkel\",\"value\":\"datajk\"},{\"name\":\"emneu\",\"value\":\"datapynenca\"},{\"name\":\"kqvcf\",\"value\":\"datargwxgczwxyghs\"}],\"\":{\"hygbe\":\"datavxcrzpdqwa\",\"lmfh\":\"datafiwbtfki\"}}") - 
.toObject(DatabricksNotebookActivity.class); - Assertions.assertEquals("bs", model.name()); - Assertions.assertEquals("xwcimamtqfrdfoiq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("nrj", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gonhmblkkel", model.userProperties().get(0).name()); - Assertions.assertEquals("dt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1589919162, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatabricksNotebookActivity model = new DatabricksNotebookActivity().withName("bs") - .withDescription("xwcimamtqfrdfoiq") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("nrj") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vnaf") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qmmkmqdfjeu") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yhujgrbjmzagxjoi") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, 
DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gonhmblkkel").withValue("datajk"), - new UserProperty().withName("emneu").withValue("datapynenca"), - new UserProperty().withName("kqvcf").withValue("datargwxgczwxyghs"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dt") - .withParameters(mapOf("indyurwzrxkha", "datavaxsw"))) - .withPolicy(new ActivityPolicy().withTimeout("datajambtvc") - .withRetry("datalpbvponxhszrot") - .withRetryIntervalInSeconds(1589919162) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withNotebookPath("dataidcytnzy") - .withBaseParameters(mapOf("nt", "datadwgqisrlhxfmvngd", "hnh", "datavn")) - .withLibraries(Arrays.asList( - mapOf("j", "databorjyprcojwiigt", "iftm", "dataczoqpkpib", "hlnaymsgbyho", "datazofont", - "ennobjixoqqjbsag", "dataqugycorgnxmn"), - mapOf("r", "dataeoefwnjsorhpga", "hwxu", "datamwlpa", "s", "dataaktnmwlklqhw"), - mapOf("lsuiyvbildw", "datakoezcabf", "nvjqeigpylpmtw", "datal"))); - model = BinaryData.fromObject(model).toObject(DatabricksNotebookActivity.class); - Assertions.assertEquals("bs", model.name()); - Assertions.assertEquals("xwcimamtqfrdfoiq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("nrj", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gonhmblkkel", model.userProperties().get(0).name()); - Assertions.assertEquals("dt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1589919162, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - 
Assertions.assertEquals(true, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java deleted file mode 100644 index 4922cfb7eb5c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksNotebookActivityTypePropertiesTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DatabricksNotebookActivityTypeProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class DatabricksNotebookActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksNotebookActivityTypeProperties model = BinaryData.fromString( - "{\"notebookPath\":\"datazsxjrafhdf\",\"baseParameters\":{\"huvflgwgqhe\":\"dataaawwnqijp\",\"guodoujpwqbot\":\"dataeasmk\",\"xxpyrtajlydefqfv\":\"datavcp\",\"xbqdwbjhgjzvceyx\":\"dataqruympov\"},\"libraries\":[{\"dggyhpuhcc\":\"datauyk\",\"abdkbkblop\":\"dataehndbutpt\"}]}") - .toObject(DatabricksNotebookActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatabricksNotebookActivityTypeProperties model - = new DatabricksNotebookActivityTypeProperties().withNotebookPath("datazsxjrafhdf") - .withBaseParameters(mapOf("huvflgwgqhe", "dataaawwnqijp", "guodoujpwqbot", "dataeasmk", - "xxpyrtajlydefqfv", "datavcp", "xbqdwbjhgjzvceyx", "dataqruympov")) - .withLibraries(Arrays.asList(mapOf("dggyhpuhcc", "datauyk", "abdkbkblop", "dataehndbutpt"))); - model = BinaryData.fromObject(model).toObject(DatabricksNotebookActivityTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java deleted file mode 100644 index 100171d47782..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTests.java +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatabricksSparkJarActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatabricksSparkJarActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksSparkJarActivity model = BinaryData.fromString( - 
"{\"type\":\"DatabricksSparkJar\",\"typeProperties\":{\"mainClassName\":\"datamorfzuhvycdndcz\",\"parameters\":[\"datafvris\",\"dataplnddpqcq\",\"datanvkmkbtpbwthzmqa\",\"datavwbgsanvdr\"],\"libraries\":[{\"zmegqt\":\"datashraepcl\",\"olu\":\"datahrzeibku\",\"raylygclwbuoqamv\":\"datadjeqdmolmcybaqke\"},{\"dll\":\"dataexqvthfnhzgt\",\"azyhhcqjahhc\":\"dataunoelknyopglgk\"}]},\"linkedServiceName\":{\"referenceName\":\"oaryh\",\"parameters\":{\"bdyhjfmyc\":\"dataftgmqlcooyxfrr\",\"yydbxlturlnbmj\":\"dataucccb\"}},\"policy\":{\"timeout\":\"dataqig\",\"retry\":\"datamozlhltgtegx\",\"retryIntervalInSeconds\":1613660684,\"secureInput\":true,\"secureOutput\":true,\"\":{\"noiriemkxm\":\"dataxscrswy\",\"imnfgfsjptb\":\"dataq\",\"toe\":\"datasvweu\"}},\"name\":\"fnhmrawmchcdegw\",\"description\":\"vaiewfjwfkw\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"t\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Skipped\",\"Failed\"],\"\":{\"fylymuwafslyt\":\"dataujywg\",\"fwpmpapwmpd\":\"datattjducosxcdhtovt\"}},{\"activity\":\"vkiwjbufz\",\"dependencyConditions\":[\"Completed\"],\"\":{\"khe\":\"datavudigwky\",\"iupjgeb\":\"datagapraafjxgoj\"}},{\"activity\":\"suiklncqoyghrba\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"lyeoynt\":\"datauxyfpcv\",\"q\":\"datax\",\"upifgizkvokkhr\":\"datazmfuh\",\"f\":\"datahvmez\"}},{\"activity\":\"drtokw\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\",\"Skipped\"],\"\":{\"uwujxx\":\"dataklgunpajwgwxctdp\",\"bzisqpstxulnn\":\"datamookh\"}}],\"userProperties\":[{\"name\":\"u\",\"value\":\"datan\"}],\"\":{\"ltvwe\":\"dataacvttdyvi\",\"lvx\":\"datazfyllkunwinqy\",\"qhpkqkxjl\":\"dataxztjece\"}}") - .toObject(DatabricksSparkJarActivity.class); - Assertions.assertEquals("fnhmrawmchcdegw", model.name()); - Assertions.assertEquals("vaiewfjwfkw", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("t", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("u", model.userProperties().get(0).name()); - Assertions.assertEquals("oaryh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1613660684, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatabricksSparkJarActivity model = new DatabricksSparkJarActivity().withName("fnhmrawmchcdegw") - .withDescription("vaiewfjwfkw") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("t") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vkiwjbufz") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("suiklncqoyghrba") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("drtokw") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("u").withValue("datan"))) - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("oaryh") - .withParameters(mapOf("bdyhjfmyc", "dataftgmqlcooyxfrr", "yydbxlturlnbmj", "dataucccb"))) - .withPolicy(new ActivityPolicy().withTimeout("dataqig") - .withRetry("datamozlhltgtegx") - .withRetryIntervalInSeconds(1613660684) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withMainClassName("datamorfzuhvycdndcz") - .withParameters(Arrays.asList("datafvris", "dataplnddpqcq", "datanvkmkbtpbwthzmqa", "datavwbgsanvdr")) - .withLibraries(Arrays.asList( - mapOf("zmegqt", "datashraepcl", "olu", "datahrzeibku", "raylygclwbuoqamv", "datadjeqdmolmcybaqke"), - mapOf("dll", "dataexqvthfnhzgt", "azyhhcqjahhc", "dataunoelknyopglgk"))); - model = BinaryData.fromObject(model).toObject(DatabricksSparkJarActivity.class); - Assertions.assertEquals("fnhmrawmchcdegw", model.name()); - Assertions.assertEquals("vaiewfjwfkw", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("t", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("u", model.userProperties().get(0).name()); - Assertions.assertEquals("oaryh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1613660684, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java deleted file mode 100644 index ff156065ed1d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkJarActivityTypePropertiesTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkJarActivityTypeProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class DatabricksSparkJarActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksSparkJarActivityTypeProperties model = BinaryData.fromString( - "{\"mainClassName\":\"datajlkjhmugyayhp\",\"parameters\":[\"datalsdgiqg\",\"dataeqcgunsoikev\",\"dataglzxgwk\"],\"libraries\":[{\"hmrk\":\"datazpdaqxnkdqs\",\"z\":\"datawkfgvhwkwzxj\",\"xduhydxahjudaz\":\"datastirrhbkzzqwikq\",\"of\":\"datamgsxolwo\"},{\"ykfcccaujgacckjq\":\"dataludflfxwlwht\",\"dbgmgxbvge\":\"datapj\",\"ntdynpi\":\"datab\"},{\"iuteusuxvli\":\"dataigxefscsrw\"}]}") - .toObject(DatabricksSparkJarActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
DatabricksSparkJarActivityTypeProperties model - = new DatabricksSparkJarActivityTypeProperties().withMainClassName("datajlkjhmugyayhp") - .withParameters(Arrays.asList("datalsdgiqg", "dataeqcgunsoikev", "dataglzxgwk")) - .withLibraries(Arrays.asList( - mapOf("hmrk", "datazpdaqxnkdqs", "z", "datawkfgvhwkwzxj", "xduhydxahjudaz", "datastirrhbkzzqwikq", - "of", "datamgsxolwo"), - mapOf("ykfcccaujgacckjq", "dataludflfxwlwht", "dbgmgxbvge", "datapj", "ntdynpi", "datab"), - mapOf("iuteusuxvli", "dataigxefscsrw"))); - model = BinaryData.fromObject(model).toObject(DatabricksSparkJarActivityTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java deleted file mode 100644 index 4a70d170caf9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTests.java +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatabricksSparkPythonActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatabricksSparkPythonActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksSparkPythonActivity model = BinaryData.fromString( - 
"{\"type\":\"DatabricksSparkPython\",\"typeProperties\":{\"pythonFile\":\"datagawenwtmvzzsb\",\"parameters\":[\"datawg\",\"datazvvtdr\",\"dataoc\"],\"libraries\":[{\"qdostvx\":\"datanphfppjzmpxam\"},{\"cpdussqfzerkpai\":\"datafnmnfndrbkko\",\"mczl\":\"datakgdrqkvnpz\",\"hz\":\"datanfwslvspar\",\"kuvbesrawzxnwxsj\":\"dataynbxwzixmv\"},{\"nsxhpqe\":\"dataigepfoksl\",\"arfdfnqfvrsxl\":\"datazzydpvvcchd\",\"fpubnx\":\"datagnlbvbdqmjce\",\"awybborjdxh\":\"dataohyesmlscvh\"}]},\"linkedServiceName\":{\"referenceName\":\"yhdkufqzuduqfd\",\"parameters\":{\"tzugkfabvekkxl\":\"dataxtplpg\"}},\"policy\":{\"timeout\":\"datazcv\",\"retry\":\"dataglvzhuj\",\"retryIntervalInSeconds\":1411241354,\"secureInput\":true,\"secureOutput\":false,\"\":{\"numvorosqesspwu\":\"dataazaoytkubmv\",\"siitzbyue\":\"datakjrqsp\"}},\"name\":\"umqmor\",\"description\":\"psflmwduisrvlun\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"rzth\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\",\"Completed\"],\"\":{\"zlkmtrr\":\"datahhcfqzmjmflc\",\"kgklqucxewcd\":\"databulvau\"}},{\"activity\":\"rqjsmhkqzvarqi\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"xhmdorxb\":\"dataimvnvxhxza\"}},{\"activity\":\"aprksoeq\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Skipped\",\"Failed\"],\"\":{\"aktgtwvzps\":\"datarcyexb\",\"gjidrag\":\"datagho\",\"yimbqdsu\":\"datacwcdbtop\",\"xwr\":\"dataazkouvvgcwsimhj\"}}],\"userProperties\":[{\"name\":\"ofwopzqxpk\",\"value\":\"datanxjmlys\"}],\"\":{\"lfahryuz\":\"databuxjhquzirhcghn\",\"ptpq\":\"dataeuegrdit\",\"xzfy\":\"dataajggmmiwoisql\"}}") - .toObject(DatabricksSparkPythonActivity.class); - Assertions.assertEquals("umqmor", model.name()); - Assertions.assertEquals("psflmwduisrvlun", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - 
Assertions.assertEquals("rzth", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ofwopzqxpk", model.userProperties().get(0).name()); - Assertions.assertEquals("yhdkufqzuduqfd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1411241354, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatabricksSparkPythonActivity model = new DatabricksSparkPythonActivity().withName("umqmor") - .withDescription("psflmwduisrvlun") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("rzth") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rqjsmhkqzvarqi") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("aprksoeq") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ofwopzqxpk").withValue("datanxjmlys"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yhdkufqzuduqfd") - .withParameters(mapOf("tzugkfabvekkxl", "dataxtplpg"))) - .withPolicy(new ActivityPolicy().withTimeout("datazcv") - .withRetry("dataglvzhuj") - .withRetryIntervalInSeconds(1411241354) - 
.withSecureInput(true) - .withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withPythonFile("datagawenwtmvzzsb") - .withParameters(Arrays.asList("datawg", "datazvvtdr", "dataoc")) - .withLibraries(Arrays.asList(mapOf("qdostvx", "datanphfppjzmpxam"), - mapOf("cpdussqfzerkpai", "datafnmnfndrbkko", "mczl", "datakgdrqkvnpz", "hz", "datanfwslvspar", - "kuvbesrawzxnwxsj", "dataynbxwzixmv"), - mapOf("nsxhpqe", "dataigepfoksl", "arfdfnqfvrsxl", "datazzydpvvcchd", "fpubnx", "datagnlbvbdqmjce", - "awybborjdxh", "dataohyesmlscvh"))); - model = BinaryData.fromObject(model).toObject(DatabricksSparkPythonActivity.class); - Assertions.assertEquals("umqmor", model.name()); - Assertions.assertEquals("psflmwduisrvlun", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("rzth", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ofwopzqxpk", model.userProperties().get(0).name()); - Assertions.assertEquals("yhdkufqzuduqfd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1411241354, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java deleted file mode 100644 index 293849e844bf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatabricksSparkPythonActivityTypePropertiesTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DatabricksSparkPythonActivityTypeProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class DatabricksSparkPythonActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatabricksSparkPythonActivityTypeProperties model = BinaryData.fromString( - "{\"pythonFile\":\"datan\",\"parameters\":[\"datanjzaaoxwcptoihoy\",\"datauaxfjuzgslqpzdx\"],\"libraries\":[{\"zscepogg\":\"datalgczvfb\",\"exyiuhjqd\":\"datappufueiaie\",\"udai\":\"datalxabtlmszq\",\"jbpt\":\"datafqnxjkopivsz\"},{\"bqsj\":\"datahabzjemqvlouucax\",\"jimussvur\":\"datancgqhpqgivyx\",\"valvkdaql\":\"datalwdxnx\",\"whtws\":\"datasoqrhwla\"},{\"rvtrwswbm\":\"dataiwpzucetzis\",\"btthzfgpzy\":\"dataubh\",\"jecajtuo\":\"dataivusehyvqxjbqfcl\"},{\"xn\":\"datadlzxuakbavpk\",\"vsgx\":\"datarbckfzb\"}]}") 
- .toObject(DatabricksSparkPythonActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatabricksSparkPythonActivityTypeProperties model - = new DatabricksSparkPythonActivityTypeProperties().withPythonFile("datan") - .withParameters(Arrays.asList("datanjzaaoxwcptoihoy", "datauaxfjuzgslqpzdx")) - .withLibraries(Arrays.asList( - mapOf("zscepogg", "datalgczvfb", "exyiuhjqd", "datappufueiaie", "udai", "datalxabtlmszq", "jbpt", - "datafqnxjkopivsz"), - mapOf("bqsj", "datahabzjemqvlouucax", "jimussvur", "datancgqhpqgivyx", "valvkdaql", "datalwdxnx", - "whtws", "datasoqrhwla"), - mapOf("rvtrwswbm", "dataiwpzucetzis", "btthzfgpzy", "dataubh", "jecajtuo", "dataivusehyvqxjbqfcl"), - mapOf("xn", "datadlzxuakbavpk", "vsgx", "datarbckfzb"))); - model = BinaryData.fromObject(model).toObject(DatabricksSparkPythonActivityTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java deleted file mode 100644 index 5ae708df3785..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetCompressionTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import java.util.HashMap; -import java.util.Map; - -public final class DatasetCompressionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetCompression model = BinaryData.fromString( - "{\"type\":\"databri\",\"level\":\"datalwwtr\",\"\":{\"nsnaajphmp\":\"dataqkvyhzokpoyuohu\",\"pezco\":\"dataejnglpwsadaxjsum\",\"mfqzwqdnx\":\"dataoyj\",\"qzkvemyzdpczaq\":\"dataeedcnwmywx\"}}") - .toObject(DatasetCompression.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetCompression model - = new DatasetCompression().withType("databri").withLevel("datalwwtr").withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DatasetCompression.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java deleted file mode 100644 index b65eccf82608..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetDebugResourceTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetDebugResource; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatasetDebugResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Dataset\",\"description\":\"qmjxlyyzglgouwtl\",\"structure\":\"datajyuojqtobaxkjeyt\",\"schema\":\"datalbfjkwr\",\"linkedServiceName\":{\"referenceName\":\"snkq\",\"parameters\":{\"qunjqh\":\"datay\"}},\"parameters\":{\"ifmjnn\":{\"type\":\"Float\",\"defaultValue\":\"dataulkpakd\"},\"yirdhlisngwflqq\":{\"type\":\"String\",\"defaultValue\":\"dataqabpxuckpggqow\"}},\"annotations\":[\"datazruwn\",\"dataqxpxiwfcngjsaa\",\"dataiixtmkzj\",\"datakv\"],\"folder\":{\"name\":\"hgfgrwsd\"},\"\":{\"bglbyvict\":\"dataatzv\"}},\"name\":\"brxkjzwr\"}") - .toObject(DatasetDebugResource.class); - Assertions.assertEquals("brxkjzwr", model.name()); - Assertions.assertEquals("qmjxlyyzglgouwtl", model.properties().description()); - Assertions.assertEquals("snkq", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("ifmjnn").type()); - Assertions.assertEquals("hgfgrwsd", model.properties().folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetDebugResource model = new 
DatasetDebugResource().withName("brxkjzwr") - .withProperties(new Dataset().withDescription("qmjxlyyzglgouwtl") - .withStructure("datajyuojqtobaxkjeyt") - .withSchema("datalbfjkwr") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("snkq").withParameters(mapOf("qunjqh", "datay"))) - .withParameters(mapOf("ifmjnn", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataulkpakd"), - "yirdhlisngwflqq", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqabpxuckpggqow"))) - .withAnnotations(Arrays.asList("datazruwn", "dataqxpxiwfcngjsaa", "dataiixtmkzj", "datakv")) - .withFolder(new DatasetFolder().withName("hgfgrwsd")) - .withAdditionalProperties(mapOf("type", "Dataset"))); - model = BinaryData.fromObject(model).toObject(DatasetDebugResource.class); - Assertions.assertEquals("brxkjzwr", model.name()); - Assertions.assertEquals("qmjxlyyzglgouwtl", model.properties().description()); - Assertions.assertEquals("snkq", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("ifmjnn").type()); - Assertions.assertEquals("hgfgrwsd", model.properties().folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java deleted file mode 100644 index 9a1b172e92b5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetFolderTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import org.junit.jupiter.api.Assertions; - -public final class DatasetFolderTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetFolder model = BinaryData.fromString("{\"name\":\"eyvpnqicvinvkj\"}").toObject(DatasetFolder.class); - Assertions.assertEquals("eyvpnqicvinvkj", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetFolder model = new DatasetFolder().withName("eyvpnqicvinvkj"); - model = BinaryData.fromObject(model).toObject(DatasetFolder.class); - Assertions.assertEquals("eyvpnqicvinvkj", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java deleted file mode 100644 
index f72ded2c2642..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetListResponseTests.java +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DatasetResourceInner; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetListResponse; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatasetListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"uxvypomgkopkwh\",\"structure\":\"datav\",\"schema\":\"datajqg\",\"linkedServiceName\":{\"referenceName\":\"ysmocmbqfqvmkcxo\",\"parameters\":{\"kcbcue\":\"datavhelxprglyatdd\",\"hos\":\"datarjxgciqib\",\"ibahwflus\":\"datasdqrhzoymibmrq\"}},\"parameters\":{\"piexpbtgiw\":{\"type\":\"Object\",\"defaultValue\":\"datarkwofyyvoqa\"},\"tdtkcn\":{\"type\":\"Object\",\"defaultValue\":\"dataenwash\"},\"i\":{\"type\":\"Float\",\"defaultValue\":\"databpokulpiujwaasip\"},\"rpqlp\":{\"type\":\"Int\",\"defaultValue\":\"datayuq\"}},\"annotations\":[\"dataciuqgbdb\",\"datat\",\"datauvfbtkuwh\",\"datamhykojoxafnndl\"],\"folder\":{\"name\":\"hkoymkcdyhbp\"},\"\":{\"xywsuws\":\"datawdreqnovvqfovl\",\"aeneqnzarrwl\":\"datarsndsytgadgvra\",\"jfqka\":\"datauu\"}},\"name\":\"wiipfpub\",\"type\":\"bwwift\",\"etag\":\"qkvpuvksgplsakn\",\"id\":\"fsynljphuop\"},{\"properties\":{\"type\":\"Dataset\",\"description\":\"dlqiyntorzih\",\"structure\":\"dataosjswsr\",\"schema\":\"datalyzrpzbchckqqzqi\",\"linkedServiceName\":{\"referenceName\":\"xiy\",\"parameters\":{\"ynkedyatrwyhqmib\":\"datai\",\"mnzgmwznmabi\":\"datayhwitsmypyynpcdp\",\"wwrlkdmtncv\":\"datansorgjhxbldt\",\"xdy\":\"datakotl\"}},\"parameters\":{\"hadoocrk\":{\"type\":\"Array\",\"defaultValue\":\"datacogjltdtbn\"},\"gxqquezik\":{\"type\":\"Object\",\"defaultValue\":\"datakhnvpam\"},\"lla\":{\"type\":\"Int\",\"defaultValue\":\"datagxk\"},\"ccjzkzivgvv\":{\"type\":\"Bool\",\"defaultValue\":\"datalwuip\"}},\"annotations\":[\"datay\",\"datahyrnxxmu\"],\"folder\":{\"name\":\"ndrdvstkwq\"},\"\":{\"ygdvwv\":\"datahealmfmtda\"}},\"name\":\"iohgwxrtfud\",\"type\":\"pxgy\",\"etag\":\"gvr\",\"id\":\"npkukghimdblx\"}],\"nextLink\":\"imfnjhfjx\"}") - .toObject(DatasetListResponse.class); - Assertions.assertEquals("fsynljphuop", model.value().get(0).id()); - Assertions.assertEquals("uxvypomgkopkwh", model.value().get(0).properties().description()); - 
Assertions.assertEquals("ysmocmbqfqvmkcxo", - model.value().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("piexpbtgiw").type()); - Assertions.assertEquals("hkoymkcdyhbp", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("imfnjhfjx", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetListResponse model = new DatasetListResponse().withValue(Arrays.asList( - new DatasetResourceInner().withId("fsynljphuop") - .withProperties(new Dataset().withDescription("uxvypomgkopkwh") - .withStructure("datav") - .withSchema("datajqg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ysmocmbqfqvmkcxo") - .withParameters(mapOf("kcbcue", "datavhelxprglyatdd", "hos", "datarjxgciqib", "ibahwflus", - "datasdqrhzoymibmrq"))) - .withParameters(mapOf("piexpbtgiw", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datarkwofyyvoqa"), - "tdtkcn", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataenwash"), "i", - new ParameterSpecification().withType(ParameterType.FLOAT) - .withDefaultValue("databpokulpiujwaasip"), - "rpqlp", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayuq"))) - .withAnnotations(Arrays.asList("dataciuqgbdb", "datat", "datauvfbtkuwh", "datamhykojoxafnndl")) - .withFolder(new DatasetFolder().withName("hkoymkcdyhbp")) - .withAdditionalProperties(mapOf("type", "Dataset"))), - new DatasetResourceInner().withId("npkukghimdblx") - .withProperties( - new Dataset().withDescription("dlqiyntorzih") - .withStructure("dataosjswsr") - .withSchema("datalyzrpzbchckqqzqi") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xiy") - .withParameters(mapOf("ynkedyatrwyhqmib", "datai", "mnzgmwznmabi", "datayhwitsmypyynpcdp", - "wwrlkdmtncv", 
"datansorgjhxbldt", "xdy", "datakotl"))) - .withParameters(mapOf("hadoocrk", - new ParameterSpecification().withType(ParameterType.ARRAY) - .withDefaultValue("datacogjltdtbn"), - "gxqquezik", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakhnvpam"), - "lla", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datagxk"), - "ccjzkzivgvv", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalwuip"))) - .withAnnotations(Arrays.asList("datay", "datahyrnxxmu")) - .withFolder(new DatasetFolder().withName("ndrdvstkwq")) - .withAdditionalProperties(mapOf("type", "Dataset"))))) - .withNextLink("imfnjhfjx"); - model = BinaryData.fromObject(model).toObject(DatasetListResponse.class); - Assertions.assertEquals("fsynljphuop", model.value().get(0).id()); - Assertions.assertEquals("uxvypomgkopkwh", model.value().get(0).properties().description()); - Assertions.assertEquals("ysmocmbqfqvmkcxo", - model.value().get(0).properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("piexpbtgiw").type()); - Assertions.assertEquals("hkoymkcdyhbp", model.value().get(0).properties().folder().name()); - Assertions.assertEquals("imfnjhfjx", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java deleted file mode 100644 index 48712cba8495..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetLocationTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import java.util.HashMap; -import java.util.Map; - -public final class DatasetLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetLocation model = BinaryData.fromString( - "{\"type\":\"DatasetLocation\",\"folderPath\":\"datavlkpzwb\",\"fileName\":\"datarecchdidrmu\",\"\":{\"khhwmj\":\"datahmjedbiuc\",\"ihufoihp\":\"datajbweunxcqr\",\"kzcscpiuzvkun\":\"dataiybxvgnzuzpb\"}}") - .toObject(DatasetLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetLocation model = new DatasetLocation().withFolderPath("datavlkpzwb") - .withFileName("datarecchdidrmu") - .withAdditionalProperties(mapOf("type", "DatasetLocation")); - model = BinaryData.fromObject(model).toObject(DatasetLocation.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java deleted file mode 100644 index c991892184d5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetReferenceTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatasetReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetReference model - = BinaryData.fromString("{\"referenceName\":\"ojbf\",\"parameters\":{\"xmbjroum\":\"datakfnjyixhafrat\"}}") - .toObject(DatasetReference.class); - Assertions.assertEquals("ojbf", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetReference model - = new DatasetReference().withReferenceName("ojbf").withParameters(mapOf("xmbjroum", "datakfnjyixhafrat")); - model = BinaryData.fromObject(model).toObject(DatasetReference.class); - Assertions.assertEquals("ojbf", model.referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java deleted file mode 100644 index 750b67119925..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetResourceInnerTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DatasetResourceInner; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatasetResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetResourceInner model = BinaryData.fromString( - 
"{\"properties\":{\"type\":\"Dataset\",\"description\":\"szkkfoqre\",\"structure\":\"datakzikfjawneaivxwc\",\"schema\":\"datalpcirelsf\",\"linkedServiceName\":{\"referenceName\":\"aenwabf\",\"parameters\":{\"nozj\":\"datalddxbjhwua\"}},\"parameters\":{\"ag\":{\"type\":\"Array\",\"defaultValue\":\"dataoulpjrv\"}},\"annotations\":[\"dataimjwosyt\",\"dataitc\"],\"folder\":{\"name\":\"cktqumiekkezzi\"},\"\":{\"hdgqggeb\":\"datayf\"}},\"name\":\"nyga\",\"type\":\"idb\",\"etag\":\"atpxl\",\"id\":\"xcyjmoadsuvarmy\"}") - .toObject(DatasetResourceInner.class); - Assertions.assertEquals("xcyjmoadsuvarmy", model.id()); - Assertions.assertEquals("szkkfoqre", model.properties().description()); - Assertions.assertEquals("aenwabf", model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.properties().parameters().get("ag").type()); - Assertions.assertEquals("cktqumiekkezzi", model.properties().folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetResourceInner model = new DatasetResourceInner().withId("xcyjmoadsuvarmy") - .withProperties(new Dataset().withDescription("szkkfoqre") - .withStructure("datakzikfjawneaivxwc") - .withSchema("datalpcirelsf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aenwabf") - .withParameters(mapOf("nozj", "datalddxbjhwua"))) - .withParameters(mapOf("ag", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataoulpjrv"))) - .withAnnotations(Arrays.asList("dataimjwosyt", "dataitc")) - .withFolder(new DatasetFolder().withName("cktqumiekkezzi")) - .withAdditionalProperties(mapOf("type", "Dataset"))); - model = BinaryData.fromObject(model).toObject(DatasetResourceInner.class); - Assertions.assertEquals("xcyjmoadsuvarmy", model.id()); - Assertions.assertEquals("szkkfoqre", model.properties().description()); - Assertions.assertEquals("aenwabf", 
model.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.properties().parameters().get("ag").type()); - Assertions.assertEquals("cktqumiekkezzi", model.properties().folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java deleted file mode 100644 index eacc0ea69d0e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetSchemaDataElementTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetSchemaDataElement; -import java.util.HashMap; -import java.util.Map; - -public final class DatasetSchemaDataElementTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetSchemaDataElement model = BinaryData.fromString( - "{\"name\":\"datakhoitw\",\"type\":\"datajsdmm\",\"\":{\"hzqpxzbawkikcdgf\":\"datanckidbjpg\",\"decfiwhagxsure\":\"databssdpjeyoqxd\",\"zjk\":\"dataqrshzzbgullcxiq\",\"rouigdmfivjqte\":\"dataxdupnamg\"}}") - .toObject(DatasetSchemaDataElement.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetSchemaDataElement model = new DatasetSchemaDataElement().withName("datakhoitw") - .withType("datajsdmm") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(DatasetSchemaDataElement.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java deleted file mode 100644 index 98e66f5927d0..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetStorageFormatTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class DatasetStorageFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DatasetStorageFormat model = BinaryData.fromString( - "{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataqq\",\"deserializer\":\"datadydkghpcvrwqir\",\"\":{\"odmkrrwepgqv\":\"datatyhhmvfxlapja\",\"bwlyvxc\":\"dataokqlujqgir\",\"stvzuzhasupml\":\"datapqvctsfaeuhwwsk\"}}") - .toObject(DatasetStorageFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DatasetStorageFormat model = new DatasetStorageFormat().withSerializer("dataqq") - .withDeserializer("datadydkghpcvrwqir") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat")); - model = BinaryData.fromObject(model).toObject(DatasetStorageFormat.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java deleted file mode 100644 index ea28f4917fde..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Dataset model = BinaryData.fromString( - "{\"type\":\"Dataset\",\"description\":\"mjsjqb\",\"structure\":\"datahyxxrwlycoduhpk\",\"schema\":\"datagymare\",\"linkedServiceName\":{\"referenceName\":\"n\",\"parameters\":{\"dgssofwqmzqal\":\"dataqugjhkycube\",\"cqqudf\":\"datarmnjijpx\",\"ayffim\":\"databyxbaaabjy\",\"gsexne\":\"datazrtuzq\"}},\"parameters\":{\"ewzsyyceuzsoib\":{\"type\":\"Int\",\"defaultValue\":\"datanw\"}},\"annotations\":[\"datapfrxtrthzvay\",\"datadwkqbrq\",\"databpaxhexiilivpdt\",\"datairqtdqoa\"],\"folder\":{\"name\":\"uzf\"},\"\":{\"zwl\":\"datauyfxrxxleptramxj\",\"tdooaoj\":\"datanwxuqlcvydyp\"}}") - .toObject(Dataset.class); - Assertions.assertEquals("mjsjqb", model.description()); - Assertions.assertEquals("n", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ewzsyyceuzsoib").type()); - Assertions.assertEquals("uzf", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Dataset model = new Dataset().withDescription("mjsjqb") - .withStructure("datahyxxrwlycoduhpk") - .withSchema("datagymare") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("n") - .withParameters(mapOf("dgssofwqmzqal", "dataqugjhkycube", "cqqudf", 
"datarmnjijpx", "ayffim", - "databyxbaaabjy", "gsexne", "datazrtuzq"))) - .withParameters(mapOf("ewzsyyceuzsoib", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datanw"))) - .withAnnotations(Arrays.asList("datapfrxtrthzvay", "datadwkqbrq", "databpaxhexiilivpdt", "datairqtdqoa")) - .withFolder(new DatasetFolder().withName("uzf")) - .withAdditionalProperties(mapOf("type", "Dataset")); - model = BinaryData.fromObject(model).toObject(Dataset.class); - Assertions.assertEquals("mjsjqb", model.description()); - Assertions.assertEquals("n", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("ewzsyyceuzsoib").type()); - Assertions.assertEquals("uzf", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 6fbdfdd799bf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.Dataset; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetResource; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DatasetsCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"Dataset\",\"description\":\"vlzeadqopw\",\"structure\":\"datarz\",\"schema\":\"datadezlhsdcpdbolczh\",\"linkedServiceName\":{\"referenceName\":\"qdvxqoajfoscdemf\",\"parameters\":{\"wljfdcyq\":\"datatzxtrjr\",\"flydywbnerygs\":\"dataqq\"}},\"parameters\":{\"svajnsuuxbyrv\":{\"type\":\"Int\",\"defaultValue\":\"datakc\"},\"usb\":{\"type\":\"SecureString\",\"defaultValue\":\"datajkysolmzrfhlynk\"},\"x\":{\"type\":\"SecureString\",\"defaultValue\":\"databjtsqfhnqxqtemvq\"}},\"annotations\":[\"dataatuiqc\"],\"folder\":{\"name\":\"kd\"},\"\":{\"gimt\":\"databxjkmavppo\",\"bjzhhjgvuvjsnb\":\"dataucls\",\"g\":\"datanuujkjkqyewtlom\"}},\"name\":\"qvrazt\",\"type\":\"uactizzhlnhg\",\"etag\":\"qciiopoam\",\"id\":\"e\"}"; - - HttpClient 
httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DatasetResource response = manager.datasets() - .define("rjdpzvh") - .withExistingFactory("wvqif", "tvfs") - .withProperties(new Dataset().withDescription("snq") - .withStructure("datavvpvuy") - .withSchema("datan") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dinnisuuak") - .withParameters(mapOf("vkcufwse", "databwhsvx", "neahowvjup", "dataqkale", "abilybmfaxep", - "dataibupgtrnjzbvb"))) - .withParameters(mapOf("jpbhcgesbt", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("datafnlksyqpkskbidmz"), - "sfln", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataenhlitcydg"), - "rdhxamjhpqfj", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagu"), "cillfq", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datairjkinofw"))) - .withAnnotations(Arrays.asList("dataifdrbkprblw", "databjse", "dataqqts", "dataupogtrwkuwn")) - .withFolder(new DatasetFolder().withName("jeo")) - .withAdditionalProperties(mapOf("type", "Dataset"))) - .withIfMatch("jbifixd") - .create(); - - Assertions.assertEquals("e", response.id()); - Assertions.assertEquals("vlzeadqopw", response.properties().description()); - Assertions.assertEquals("qdvxqoajfoscdemf", response.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, response.properties().parameters().get("svajnsuuxbyrv").type()); - Assertions.assertEquals("kd", response.properties().folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static 
Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java deleted file mode 100644 index 24b9257daead..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DatasetsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", 
AzureEnvironment.AZURE)); - - manager.datasets() - .deleteWithResponse("tfsciayclvaivsa", "rfjhcrqnwoahfaq", "cqj", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java deleted file mode 100644 index e1f29b98ea5f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsGetWithResponseMockTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DatasetResource; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DatasetsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"Dataset\",\"description\":\"ptvmtnougmf\",\"structure\":\"datas\",\"schema\":\"datatreihlszpus\",\"linkedServiceName\":{\"referenceName\":\"f\",\"parameters\":{\"oldbbliljgyrp\":\"datakueprpnzb\",\"aovqbnz\":\"datamaywp\"}},\"parameters\":{\"vvbsilahskesea\":{\"type\":\"Array\",\"defaultValue\":\"datanrowdqojxya\"},\"xiwc\":{\"type\":\"SecureString\",\"defaultValue\":\"datadyessiielbtg\"},\"yrzidoyvquufpl\":{\"type\":\"SecureString\",\"defaultValue\":\"datah\"}},\"annotations\":[\"datav\",\"databtrftotpv\",\"dataeh\",\"datafwrao\"],\"folder\":{\"name\":\"xew\"},\"\":{\"iqjegeafg\":\"datafqsreyuillrrqwkp\",\"osnvcwj\":\"dataqglljcblppn\",\"zmwbxautspnyutf\":\"datawgakghvaqbk\",\"birjnddaovgi\":\"dataqighnunptjm\"}},\"name\":\"mztrlnncvj\",\"type\":\"xucqxj\",\"etag\":\"m\",\"id\":\"bliegzj\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - DatasetResource response = manager.datasets() - .getWithResponse("isjboghjdihtc", "ddfvdktbaexbvyu", "rbycuuxgda", "flil", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("bliegzj", response.id()); - Assertions.assertEquals("ptvmtnougmf", response.properties().description()); - Assertions.assertEquals("f", response.properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, response.properties().parameters().get("vvbsilahskesea").type()); - Assertions.assertEquals("xew", response.properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java deleted file mode 100644 index a658565d236d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DatasetsListByFactoryMockTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.DatasetResource; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class DatasetsListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"properties\":{\"type\":\"Dataset\",\"description\":\"mxibpcnm\",\"structure\":\"datayzka\",\"schema\":\"datatiprriqwftrjd\",\"linkedServiceName\":{\"referenceName\":\"ingrcjoycqnd\",\"parameters\":{\"hntchigub\":\"datatzytesz\",\"dcmjfieydtnpqtwo\":\"dataidwgyazppefsdo\",\"goaxtwtkkmuir\":\"datafhsckecume\",\"iiudnmojjmimy\":\"datakoaxstqqjqliyxze\"}},\"parameters\":{\"zmijirpwltbl\":{\"type\":\"Float\",\"defaultValue\":\"datataaxluo\"}},\"annotations\":[\"databafcmsotudnkr\"],\"folder\":{\"name\":\"hyqeiguxixfe\"},\"\":{\"ikanybo\":\"datamavinumdngqyvzzr\",\"zzqolmoifxl\":\"dataagaigtpj\",\"jubjqjxobmv\":\"databwdkjqxqj\",\"arneug\":\"datajtzatr\"}},\"name\":\"pkjyo\",\"type\":\"wcxedkkd\",\"etag\":\"frisreh\",\"id\":\"fiflpiq\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.datasets().listByFactory("khlopy", "rsvyjrqhpz", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("fiflpiq", response.iterator().next().id()); - Assertions.assertEquals("mxibpcnm", response.iterator().next().properties().description()); - Assertions.assertEquals("ingrcjoycqnd", - response.iterator().next().properties().linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, - response.iterator().next().properties().parameters().get("zmijirpwltbl").type()); - Assertions.assertEquals("hyqeiguxixfe", response.iterator().next().properties().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java deleted file mode 100644 index b16220e87774..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2SourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Db2Source; - -public final class Db2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Db2Source model = BinaryData.fromString( - "{\"type\":\"Db2Source\",\"query\":\"datanemazgtby\",\"queryTimeout\":\"datashcawexgeqoj\",\"additionalColumns\":\"datauzxxkojjphbo\",\"sourceRetryCount\":\"dataovsvwnpcxd\",\"sourceRetryWait\":\"datatvpary\",\"maxConcurrentConnections\":\"datanyhmlpzde\",\"disableMetricsCollection\":\"datatucnzbpoc\",\"\":{\"t\":\"dataz\"}}") - .toObject(Db2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Db2Source model = new Db2Source().withSourceRetryCount("dataovsvwnpcxd") - .withSourceRetryWait("datatvpary") - .withMaxConcurrentConnections("datanyhmlpzde") - .withDisableMetricsCollection("datatucnzbpoc") - .withQueryTimeout("datashcawexgeqoj") - .withAdditionalColumns("datauzxxkojjphbo") - .withQuery("datanemazgtby"); - model = BinaryData.fromObject(model).toObject(Db2Source.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java deleted file 
mode 100644 index ae44d725567b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.Db2TableDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class Db2TableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Db2TableDataset model = BinaryData.fromString( - "{\"type\":\"Db2Table\",\"typeProperties\":{\"tableName\":\"datamw\",\"schema\":\"datavxgwz\",\"table\":\"datawdtlcjgpvc\"},\"description\":\"vzrbvgwxhlx\",\"structure\":\"dataxvmdr\",\"schema\":\"datan\",\"linkedServiceName\":{\"referenceName\":\"g\",\"parameters\":{\"hhplkhww\":\"datazoymd\",\"yearmhpwb\":\"datakatveqmgkcswzeyx\",\"smfasgtlv\":\"datakl\"}},\"parameters\":{\"vftmh\":{\"type\":\"Bool\",\"defaultValue\":\"datalosjaemcez\"}},\"annotations\":[\"dataokjyghzt\",\"datasmiwtpcflc\",\"datazswwvwi\"],\"folder\":{\"name\":\"jtvbfp\"},\"\":{\"umxquk\":\"datauptsyqcjnqswxdo\",\"clqddnhfknebw\":\"datacdio\",\"mkaqldqabnwvpaq\":\"dataddpnyzcwyj\"}}") - .toObject(Db2TableDataset.class); - Assertions.assertEquals("vzrbvgwxhlx", model.description()); - Assertions.assertEquals("g", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vftmh").type()); - Assertions.assertEquals("jtvbfp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Db2TableDataset model = new Db2TableDataset().withDescription("vzrbvgwxhlx") - .withStructure("dataxvmdr") - .withSchema("datan") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("g") - .withParameters( - mapOf("hhplkhww", "datazoymd", "yearmhpwb", "datakatveqmgkcswzeyx", "smfasgtlv", "datakl"))) - .withParameters(mapOf("vftmh", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalosjaemcez"))) - .withAnnotations(Arrays.asList("dataokjyghzt", "datasmiwtpcflc", "datazswwvwi")) - .withFolder(new DatasetFolder().withName("jtvbfp")) - .withTableName("datamw") - .withSchemaTypePropertiesSchema("datavxgwz") - .withTable("datawdtlcjgpvc"); - model = BinaryData.fromObject(model).toObject(Db2TableDataset.class); - Assertions.assertEquals("vzrbvgwxhlx", model.description()); - Assertions.assertEquals("g", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vftmh").type()); - Assertions.assertEquals("jtvbfp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java deleted file mode 100644 index fc7dd2b1ae89..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Db2TableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.Db2TableDatasetTypeProperties; - -public final class Db2TableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Db2TableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"dataxf\",\"schema\":\"dataigcfddofxnf\",\"table\":\"datajyyrqaedw\"}") - .toObject(Db2TableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Db2TableDatasetTypeProperties model = new Db2TableDatasetTypeProperties().withTableName("dataxf") - .withSchema("dataigcfddofxnf") - .withTable("datajyyrqaedw"); - model = BinaryData.fromObject(model).toObject(Db2TableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java deleted file mode 100644 index 4017213d57ce..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTests.java +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DeleteActivity; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogStorageSettings; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DeleteActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DeleteActivity model = BinaryData.fromString( - 
"{\"type\":\"Delete\",\"typeProperties\":{\"recursive\":\"dataqfwobnbluutmfiml\",\"maxConcurrentConnections\":497751591,\"enableLogging\":\"datadxjirfye\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"jrdcgeormxipwcq\",\"parameters\":{\"cuhas\":\"datanmfnfatwi\",\"whotjcgdpqk\":\"dataielhtukhei\",\"qm\":\"datax\",\"mjuqq\":\"datavrglqlvmkeseyqo\"}},\"path\":\"datamzletackjuwkkvar\",\"logLevel\":\"datajuefj\",\"enableReliableLogging\":\"dataowqwod\",\"\":{\"hciapvc\":\"datadtywajqw\",\"ltftaqmrimlet\":\"datawzjrpcpg\",\"ptf\":\"datav\"}},\"dataset\":{\"referenceName\":\"ab\",\"parameters\":{\"szxupwri\":\"datahzfglpsw\",\"haskuiosl\":\"datakqnbiiandhsjp\"}},\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datacuh\",\"disableMetricsCollection\":\"datarfhmr\",\"\":{\"aaabtxrhemnkyk\":\"datanojfdiijch\",\"dqhjpzee\":\"dataruomwyoktzffp\",\"o\":\"datavkuvykiyrfo\"}}},\"linkedServiceName\":{\"referenceName\":\"oiks\",\"parameters\":{\"rmugkugwtgfktw\":\"datakbdh\",\"isatb\":\"datayhauhqvxeyl\",\"xxq\":\"dataghtkdcuf\"}},\"policy\":{\"timeout\":\"datavfvqvernqk\",\"retry\":\"datayyysvtjoxw\",\"retryIntervalInSeconds\":1319842201,\"secureInput\":true,\"secureOutput\":false,\"\":{\"c\":\"dataiz\"}},\"name\":\"pddzzdw\",\"description\":\"vmocnfzmuyykxlfl\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"n\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"gpomcrevt\":\"datavnb\",\"xpnraeo\":\"datazclilzveisdnobxc\",\"xvsuhxrctcozfj\":\"dataixof\",\"rrwbcycwasmrfbw\":\"datafrbjrbqc\"}}],\"userProperties\":[{\"name\":\"mhhvbovblxfyle\",\"value\":\"datagd\"},{\"name\":\"iurfemnykfzsouo\",\"value\":\"dataezszlr\"}],\"\":{\"pkxprbutyjfhjh\":\"dataldgiij\",\"ljkqlruhhk\":\"datav\"}}") - .toObject(DeleteActivity.class); - Assertions.assertEquals("pddzzdw", model.name()); - Assertions.assertEquals("vmocnfzmuyykxlfl", model.description()); - 
Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("n", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mhhvbovblxfyle", model.userProperties().get(0).name()); - Assertions.assertEquals("oiks", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1319842201, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(497751591, model.maxConcurrentConnections()); - Assertions.assertEquals("jrdcgeormxipwcq", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ab", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DeleteActivity model = new DeleteActivity().withName("pddzzdw") - .withDescription("vmocnfzmuyykxlfl") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("n") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("mhhvbovblxfyle").withValue("datagd"), - new UserProperty().withName("iurfemnykfzsouo").withValue("dataezszlr"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("oiks") - .withParameters(mapOf("rmugkugwtgfktw", "datakbdh", "isatb", "datayhauhqvxeyl", "xxq", "dataghtkdcuf"))) - .withPolicy(new ActivityPolicy().withTimeout("datavfvqvernqk") - .withRetry("datayyysvtjoxw") - .withRetryIntervalInSeconds(1319842201) - .withSecureInput(true) - 
.withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withRecursive("dataqfwobnbluutmfiml") - .withMaxConcurrentConnections(497751591) - .withEnableLogging("datadxjirfye") - .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jrdcgeormxipwcq") - .withParameters(mapOf("cuhas", "datanmfnfatwi", "whotjcgdpqk", "dataielhtukhei", "qm", "datax", - "mjuqq", "datavrglqlvmkeseyqo"))) - .withPath("datamzletackjuwkkvar") - .withLogLevel("datajuefj") - .withEnableReliableLogging("dataowqwod") - .withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("ab") - .withParameters(mapOf("szxupwri", "datahzfglpsw", "haskuiosl", "datakqnbiiandhsjp"))) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datacuh") - .withDisableMetricsCollection("datarfhmr") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))); - model = BinaryData.fromObject(model).toObject(DeleteActivity.class); - Assertions.assertEquals("pddzzdw", model.name()); - Assertions.assertEquals("vmocnfzmuyykxlfl", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("n", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mhhvbovblxfyle", model.userProperties().get(0).name()); - Assertions.assertEquals("oiks", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1319842201, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(497751591, model.maxConcurrentConnections()); - Assertions.assertEquals("jrdcgeormxipwcq", 
model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("ab", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java deleted file mode 100644 index 16711e9ca49b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteActivityTypePropertiesTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DeleteActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogStorageSettings; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DeleteActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DeleteActivityTypeProperties model = BinaryData.fromString( - "{\"recursive\":\"datafgrms\",\"maxConcurrentConnections\":1585292907,\"enableLogging\":\"datarb\",\"logStorageSettings\":{\"linkedServiceName\":{\"referenceName\":\"mfp\",\"parameters\":{\"dyxqjy\":\"datadcvwsb\"}},\"path\":\"dataouyfcfded\",\"logLevel\":\"dataphgnfaanubjeboel\",\"enableReliableLogging\":\"datag\",\"\":{\"mjdhwnf\":\"datawmv\",\"vlkpwavd\":\"datajhhpfjz\",\"bitudwuoxir\":\"datapecjmovrsrtldijg\",\"gb\":\"datailuzokxphcjtwh\"}},\"dataset\":{\"referenceName\":\"jmgctwamjjw\",\"parameters\":{\"m\":\"datagistnyzmhhope\",\"qtgfbmocvb\":\"dataxtdyxzg\"}},\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datakqxhkh\",\"disableMetricsCollection\":\"datarcqpxaajt\",\"\":{\"azmxssbv\":\"dataqtuztzmubxngs\"}}}") - .toObject(DeleteActivityTypeProperties.class); - Assertions.assertEquals(1585292907, model.maxConcurrentConnections()); - Assertions.assertEquals("mfp", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("jmgctwamjjw", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DeleteActivityTypeProperties model = new 
DeleteActivityTypeProperties().withRecursive("datafgrms") - .withMaxConcurrentConnections(1585292907) - .withEnableLogging("datarb") - .withLogStorageSettings(new LogStorageSettings() - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("mfp").withParameters(mapOf("dyxqjy", "datadcvwsb"))) - .withPath("dataouyfcfded") - .withLogLevel("dataphgnfaanubjeboel") - .withEnableReliableLogging("datag") - .withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("jmgctwamjjw") - .withParameters(mapOf("m", "datagistnyzmhhope", "qtgfbmocvb", "dataxtdyxzg"))) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datakqxhkh") - .withDisableMetricsCollection("datarcqpxaajt") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))); - model = BinaryData.fromObject(model).toObject(DeleteActivityTypeProperties.class); - Assertions.assertEquals(1585292907, model.maxConcurrentConnections()); - Assertions.assertEquals("mfp", model.logStorageSettings().linkedServiceName().referenceName()); - Assertions.assertEquals("jmgctwamjjw", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java deleted file mode 100644 index 6416b5ab534c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DeleteDataFlowDebugSessionRequestTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest; -import org.junit.jupiter.api.Assertions; - -public final class DeleteDataFlowDebugSessionRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DeleteDataFlowDebugSessionRequest model - = BinaryData.fromString("{\"sessionId\":\"pnwjfujq\"}").toObject(DeleteDataFlowDebugSessionRequest.class); - Assertions.assertEquals("pnwjfujq", model.sessionId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DeleteDataFlowDebugSessionRequest model = new DeleteDataFlowDebugSessionRequest().withSessionId("pnwjfujq"); - model = BinaryData.fromObject(model).toObject(DeleteDataFlowDebugSessionRequest.class); - Assertions.assertEquals("pnwjfujq", model.sessionId()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java deleted file mode 100644 index 488135aa0bc7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextReadSettingsTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.DelimitedTextReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class DelimitedTextReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DelimitedTextReadSettings model = BinaryData.fromString( - "{\"type\":\"DelimitedTextReadSettings\",\"skipLineCount\":\"dataksixhornvydx\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"ttiqwixsdxxflwfv\":\"dataqeayjd\",\"uqetvbqhyszfl\":\"datahbwhr\",\"ayfklbg\":\"datajzdciwxlggrt\",\"dp\":\"datahb\"}},\"\":{\"hjtrashnfofiy\":\"datasqznv\",\"ccpqboubehzipyi\":\"dataloeq\",\"inxkothrnlg\":\"datafsuuvoqqse\",\"jnhhiofcnyz\":\"datawwjikgpwdczzkzib\"}}") - .toObject(DelimitedTextReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DelimitedTextReadSettings model = new DelimitedTextReadSettings().withSkipLineCount("dataksixhornvydx") - .withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); 
- model = BinaryData.fromObject(model).toObject(DelimitedTextReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java deleted file mode 100644 index 787f74a8f86c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSinkTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DelimitedTextSink; -import com.azure.resourcemanager.datafactory.models.DelimitedTextWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class DelimitedTextSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DelimitedTextSink model = BinaryData.fromString( - "{\"type\":\"DelimitedTextSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataaocvetzk\",\"disableMetricsCollection\":\"datalbclspqvxz\",\"copyBehavior\":\"datau\",\"metadata\":[{\"name\":\"datagpbv\",\"value\":\"datakpzdkiyww\"},{\"name\":\"datavxuhzi\",\"value\":\"dataanbtqejfq\"},{\"name\":\"datagadrvxbcye\",\"value\":\"datajbcbrtiqpj\"},{\"name\":\"datakamhdqluicrqxqj\",\"value\":\"dataosmlhcppfgtns\"}],\"\":{\"mfhde\":\"datahztnjpkpmdlt\",\"xpebsxcnhq\":\"dataliaaiqyxlro\",\"rdamyumr\":\"datacbtyor\",\"ygj\":\"databbaxnym\"}},\"formatSettings\":{\"type\":\"DelimitedTextWriteSettings\",\"quoteAllText\":\"datakakgwlqzn\",\"fileExtension\":\"databsdgyheyayktutf\",\"maxRowsPerFile\":\"datae\",\"fileNamePrefix\":\"dataxefs\",\"\":{\"sz\":\"datadcoeexwg\",\"jhghi\":\"dataqzm\",\"bjvmdkgvu\":\"datakhjldwxdqtjhtgnc\"}},\"writeBatchSize\":\"datamlsuuhwuox\",\"writeBatchTimeout\":\"datai\",\"sinkRetryCount\":\"datazzjo\",\"sinkRetryWait\":\"dataygzjrkslqba\",\"maxConcurrentConnections\":\"databjxxcruleim\",\"disableMetricsCollection\":\"dataxoign\",\"\":{\"vf\":\"datajmpgzet\",\"fks\":\"datasijpmeptnqsnp\",\"qol\":\"datarrvjwbeeolmob\"}}") - .toObject(DelimitedTextSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DelimitedTextSink model = 
new DelimitedTextSink().withWriteBatchSize("datamlsuuhwuox") - .withWriteBatchTimeout("datai") - .withSinkRetryCount("datazzjo") - .withSinkRetryWait("dataygzjrkslqba") - .withMaxConcurrentConnections("databjxxcruleim") - .withDisableMetricsCollection("dataxoign") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataaocvetzk") - .withDisableMetricsCollection("datalbclspqvxz") - .withCopyBehavior("datau") - .withMetadata(Arrays.asList(new MetadataItem().withName("datagpbv").withValue("datakpzdkiyww"), - new MetadataItem().withName("datavxuhzi").withValue("dataanbtqejfq"), - new MetadataItem().withName("datagadrvxbcye").withValue("datajbcbrtiqpj"), - new MetadataItem().withName("datakamhdqluicrqxqj").withValue("dataosmlhcppfgtns"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new DelimitedTextWriteSettings().withQuoteAllText("datakakgwlqzn") - .withFileExtension("databsdgyheyayktutf") - .withMaxRowsPerFile("datae") - .withFileNamePrefix("dataxefs")); - model = BinaryData.fromObject(model).toObject(DelimitedTextSink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java deleted file mode 100644 index 05104cff8ded..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextSourceTests.java +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.DelimitedTextReadSettings; -import com.azure.resourcemanager.datafactory.models.DelimitedTextSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class DelimitedTextSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DelimitedTextSource model = BinaryData.fromString( - "{\"type\":\"DelimitedTextSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datayiwzou\",\"disableMetricsCollection\":\"dataamdgff\",\"\":{\"jwlpcxljzzcdrgtu\":\"dataisoorwfdtjp\",\"rolge\":\"dataoouocafaxvhjrpb\",\"njtxuuwdmrqah\":\"datasgn\",\"lmacbwmvphmjyzic\":\"databyjahbzb\"}},\"formatSettings\":{\"type\":\"DelimitedTextReadSettings\",\"skipLineCount\":\"datalazcgwnibnduqgj\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"uhumgw\":\"dataxiaocr\",\"okoxqboz\":\"datapugnvhtgwadu\"}},\"\":{\"ukqobovqll\":\"datazwinr\",\"ozqlyputawdm\":\"dataqlqufkrnrbnjkco\",\"jzofyldxkzhvfo\":\"dataikufzqvvk\"}},\"additionalColumns\":\"datavnhpebuiyw\",\"sourceRetryCount\":\"datasgqacbeau\",\"sourceRetryWait\":\"datadbdnmguifq\",\"maxConcurrentConnections\":\"dataoxzxbljpzauug\",\"disableMetricsCollection\":\"datarfumitjaii\",\"\":{\"sqrjcozrw\":\"datakfdybvywbgmjrv\",\"tvxkxg\":\"dataylc\",\"qaaqjbl\":\"datafpvvqwvvnx\",\"tsztxoswvfrym\":\"dataqwwtevfeugc\"}}") - .toObject(DelimitedTextSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DelimitedTextSource model - = new 
DelimitedTextSource().withSourceRetryCount("datasgqacbeau") - .withSourceRetryWait("datadbdnmguifq") - .withMaxConcurrentConnections("dataoxzxbljpzauug") - .withDisableMetricsCollection("datarfumitjaii") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datayiwzou") - .withDisableMetricsCollection("dataamdgff") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new DelimitedTextReadSettings().withSkipLineCount("datalazcgwnibnduqgj") - .withCompressionProperties(new CompressionReadSettings() - .withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datavnhpebuiyw"); - model = BinaryData.fromObject(model).toObject(DelimitedTextSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java deleted file mode 100644 index 90d4b3cd30d7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DelimitedTextWriteSettingsTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DelimitedTextWriteSettings; - -public final class DelimitedTextWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DelimitedTextWriteSettings model = BinaryData.fromString( - "{\"type\":\"DelimitedTextWriteSettings\",\"quoteAllText\":\"datajxhmkchj\",\"fileExtension\":\"dataxrbbh\",\"maxRowsPerFile\":\"dataxuqhyrb\",\"fileNamePrefix\":\"datapgadesnesgnx\",\"\":{\"icto\":\"dataxtefv\"}}") - .toObject(DelimitedTextWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DelimitedTextWriteSettings model = new DelimitedTextWriteSettings().withQuoteAllText("datajxhmkchj") - .withFileExtension("dataxrbbh") - .withMaxRowsPerFile("dataxuqhyrb") - .withFileNamePrefix("datapgadesnesgnx"); - model = BinaryData.fromObject(model).toObject(DelimitedTextWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java deleted file mode 100644 index 9872370dba56..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DependencyReferenceTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DependencyReference; - -public final class DependencyReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DependencyReference model - = BinaryData.fromString("{\"type\":\"DependencyReference\"}").toObject(DependencyReference.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DependencyReference model = new DependencyReference(); - model = BinaryData.fromObject(model).toObject(DependencyReference.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java deleted file mode 100644 index 230cdacba1e2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DistcpSettingsTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DistcpSettings; - -public final class DistcpSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DistcpSettings model = BinaryData.fromString( - "{\"resourceManagerEndpoint\":\"datavdfeyaevzjqfxf\",\"tempScriptPath\":\"datayjzu\",\"distcpOptions\":\"dataxb\"}") - .toObject(DistcpSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DistcpSettings model = new DistcpSettings().withResourceManagerEndpoint("datavdfeyaevzjqfxf") - .withTempScriptPath("datayjzu") - .withDistcpOptions("dataxb"); - model = BinaryData.fromObject(model).toObject(DistcpSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java deleted file mode 100644 index 54ac126c688f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DocumentDbCollectionDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DocumentDbCollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DocumentDbCollectionDataset model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollection\",\"typeProperties\":{\"collectionName\":\"dataja\"},\"description\":\"vzff\",\"structure\":\"datatj\",\"schema\":\"dataoss\",\"linkedServiceName\":{\"referenceName\":\"fjxtvlxxzqfc\",\"parameters\":{\"hjmbji\":\"dataiomxeezw\",\"hpyvdkgdet\":\"dataegmxdbsohc\",\"canzb\":\"dataz\"}},\"parameters\":{\"smuffiwjb\":{\"type\":\"Int\",\"defaultValue\":\"datayckyvne\"},\"scsl\":{\"type\":\"Object\",\"defaultValue\":\"databpzujqjtotdxp\"}},\"annotations\":[\"datausiecktybhjuxid\",\"datahxomilddxjx\",\"dataugxwjwilmqrslaat\"],\"folder\":{\"name\":\"wuj\"},\"\":{\"qqy\":\"dataxqgqwlxrhgtvhv\",\"ejogmkor\":\"databkkteo\",\"ofnqhlbs\":\"datavmvm\",\"yalhtgm\":\"dataosnqliwkmzojfe\"}}") - .toObject(DocumentDbCollectionDataset.class); - Assertions.assertEquals("vzff", model.description()); - Assertions.assertEquals("fjxtvlxxzqfc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("smuffiwjb").type()); - Assertions.assertEquals("wuj", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DocumentDbCollectionDataset model = new 
DocumentDbCollectionDataset().withDescription("vzff") - .withStructure("datatj") - .withSchema("dataoss") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("fjxtvlxxzqfc") - .withParameters(mapOf("hjmbji", "dataiomxeezw", "hpyvdkgdet", "dataegmxdbsohc", "canzb", "dataz"))) - .withParameters(mapOf("smuffiwjb", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayckyvne"), "scsl", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("databpzujqjtotdxp"))) - .withAnnotations(Arrays.asList("datausiecktybhjuxid", "datahxomilddxjx", "dataugxwjwilmqrslaat")) - .withFolder(new DatasetFolder().withName("wuj")) - .withCollectionName("dataja"); - model = BinaryData.fromObject(model).toObject(DocumentDbCollectionDataset.class); - Assertions.assertEquals("vzff", model.description()); - Assertions.assertEquals("fjxtvlxxzqfc", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("smuffiwjb").type()); - Assertions.assertEquals("wuj", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 858e5e1001cd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DocumentDbCollectionDatasetTypeProperties; - -public final class DocumentDbCollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DocumentDbCollectionDatasetTypeProperties model - = BinaryData.fromString("{\"collectionName\":\"datanazgbjbhrpgiq\"}") - .toObject(DocumentDbCollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DocumentDbCollectionDatasetTypeProperties model - = new DocumentDbCollectionDatasetTypeProperties().withCollectionName("datanazgbjbhrpgiq"); - model = BinaryData.fromObject(model).toObject(DocumentDbCollectionDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java deleted file mode 100644 index d8bcd8661c1d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSinkTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DocumentDbCollectionSink; - -public final class DocumentDbCollectionSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DocumentDbCollectionSink model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollectionSink\",\"nestingSeparator\":\"dataylvrofhhitjhh\",\"writeBehavior\":\"datavwrc\",\"writeBatchSize\":\"datahllmblls\",\"writeBatchTimeout\":\"datafdrimoopfr\",\"sinkRetryCount\":\"datajjrhxornuoqpob\",\"sinkRetryWait\":\"datarsdx\",\"maxConcurrentConnections\":\"datamq\",\"disableMetricsCollection\":\"dataxbqyavcxjols\",\"\":{\"pnms\":\"datax\"}}") - .toObject(DocumentDbCollectionSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DocumentDbCollectionSink model = new DocumentDbCollectionSink().withWriteBatchSize("datahllmblls") - .withWriteBatchTimeout("datafdrimoopfr") - .withSinkRetryCount("datajjrhxornuoqpob") - .withSinkRetryWait("datarsdx") - .withMaxConcurrentConnections("datamq") - .withDisableMetricsCollection("dataxbqyavcxjols") - .withNestingSeparator("dataylvrofhhitjhh") - .withWriteBehavior("datavwrc"); - model = BinaryData.fromObject(model).toObject(DocumentDbCollectionSink.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java deleted file mode 100644 index cf1821640b18..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DocumentDbCollectionSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DocumentDbCollectionSource; - -public final class DocumentDbCollectionSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DocumentDbCollectionSource model = BinaryData.fromString( - "{\"type\":\"DocumentDbCollectionSource\",\"query\":\"dataeadokuqn\",\"nestingSeparator\":\"dataskfl\",\"queryTimeout\":\"dataaysrjeevm\",\"additionalColumns\":\"dataemrhbzetss\",\"sourceRetryCount\":\"datawexbotbrep\",\"sourceRetryWait\":\"datarlieeocyarvsfz\",\"maxConcurrentConnections\":\"datascootfsgilwis\",\"disableMetricsCollection\":\"datazpzitustrtrf\",\"\":{\"qtu\":\"dataknocshmpc\",\"amctzmwrhccdg\":\"databirbrvzhfjqpxyd\"}}") - .toObject(DocumentDbCollectionSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DocumentDbCollectionSource model = new DocumentDbCollectionSource().withSourceRetryCount("datawexbotbrep") - .withSourceRetryWait("datarlieeocyarvsfz") - .withMaxConcurrentConnections("datascootfsgilwis") - .withDisableMetricsCollection("datazpzitustrtrf") - .withQuery("dataeadokuqn") - .withNestingSeparator("dataskfl") - 
.withQueryTimeout("dataaysrjeevm") - .withAdditionalColumns("dataemrhbzetss"); - model = BinaryData.fromObject(model).toObject(DocumentDbCollectionSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java deleted file mode 100644 index ede82f1cd0c9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DrillDatasetTypeProperties; - -public final class DrillDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DrillDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"dataqnps\",\"table\":\"dataeuybu\",\"schema\":\"datadzjfjtvpeyxdyuxu\"}") - .toObject(DrillDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DrillDatasetTypeProperties model = new DrillDatasetTypeProperties().withTableName("dataqnps") - .withTable("dataeuybu") - .withSchema("datadzjfjtvpeyxdyuxu"); - model = BinaryData.fromObject(model).toObject(DrillDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java deleted file mode 100644 index 9ba2a477a9a6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DrillSource; - -public final class DrillSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DrillSource model = BinaryData.fromString( - "{\"type\":\"DrillSource\",\"query\":\"datadbvcxoamh\",\"queryTimeout\":\"datauhnbdlz\",\"additionalColumns\":\"dataectzjjgvcbt\",\"sourceRetryCount\":\"datampnkyvujhej\",\"sourceRetryWait\":\"datarvlguy\",\"maxConcurrentConnections\":\"datarngnbqhmuqyzxko\",\"disableMetricsCollection\":\"datarcjshtcfnbf\",\"\":{\"huboqozx\":\"dataxlyhxpdqo\"}}") - .toObject(DrillSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DrillSource model = new DrillSource().withSourceRetryCount("datampnkyvujhej") - .withSourceRetryWait("datarvlguy") - .withMaxConcurrentConnections("datarngnbqhmuqyzxko") - .withDisableMetricsCollection("datarcjshtcfnbf") - .withQueryTimeout("datauhnbdlz") - .withAdditionalColumns("dataectzjjgvcbt") - .withQuery("datadbvcxoamh"); - model = BinaryData.fromObject(model).toObject(DrillSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java deleted file mode 100644 index d704d2cad700..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DrillTableDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DrillTableDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DrillTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DrillTableDataset model = BinaryData.fromString( - "{\"type\":\"DrillTable\",\"typeProperties\":{\"tableName\":\"dataoxfalokbskkypo\",\"table\":\"dataynieunbydlgfaphw\",\"schema\":\"databwtsaynrt\"},\"description\":\"fqreeoxvqjmrnbl\",\"structure\":\"datasdbfbmdiv\",\"schema\":\"datazhpjgqz\",\"linkedServiceName\":{\"referenceName\":\"iaoaweacfxa\",\"parameters\":{\"k\":\"datahruetcnxriqzzd\",\"wobwxrxm\":\"datasqdrrjsurn\",\"ouqjp\":\"dataokohlsfj\"}},\"parameters\":{\"ymudj\":{\"type\":\"SecureString\",\"defaultValue\":\"datauvkqxqkvadmj\"}},\"annotations\":[\"datajzdebhsermcly\",\"datawwuhyqkaapt\"],\"folder\":{\"name\":\"vkbcbptwyb\"},\"\":{\"xzbnss\":\"datacuhp\"}}") - .toObject(DrillTableDataset.class); - 
Assertions.assertEquals("fqreeoxvqjmrnbl", model.description()); - Assertions.assertEquals("iaoaweacfxa", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ymudj").type()); - Assertions.assertEquals("vkbcbptwyb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DrillTableDataset model = new DrillTableDataset().withDescription("fqreeoxvqjmrnbl") - .withStructure("datasdbfbmdiv") - .withSchema("datazhpjgqz") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("iaoaweacfxa") - .withParameters( - mapOf("k", "datahruetcnxriqzzd", "wobwxrxm", "datasqdrrjsurn", "ouqjp", "dataokohlsfj"))) - .withParameters(mapOf("ymudj", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datauvkqxqkvadmj"))) - .withAnnotations(Arrays.asList("datajzdebhsermcly", "datawwuhyqkaapt")) - .withFolder(new DatasetFolder().withName("vkbcbptwyb")) - .withTableName("dataoxfalokbskkypo") - .withTable("dataynieunbydlgfaphw") - .withSchemaTypePropertiesSchema("databwtsaynrt"); - model = BinaryData.fromObject(model).toObject(DrillTableDataset.class); - Assertions.assertEquals("fqreeoxvqjmrnbl", model.description()); - Assertions.assertEquals("iaoaweacfxa", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("ymudj").type()); - Assertions.assertEquals("vkbcbptwyb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java deleted file mode 100644 index 198de9d59b2d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DynamicsAXResourceDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DynamicsAXResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsAXResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"DynamicsAXResource\",\"typeProperties\":{\"path\":\"dataz\"},\"description\":\"qrzvuxnx\",\"structure\":\"dataohshzultdb\",\"schema\":\"datajdhypngocbdx\",\"linkedServiceName\":{\"referenceName\":\"rivptbczsu\",\"parameters\":{\"maatvogpyc\":\"datazukekytkzvt\",\"hbdxsbypl\":\"datainha\",\"sezsggdp\":\"datakhwfjudapbq\",\"iasfzrguz\":\"datatfcbrtsrdplqdyza\"}},\"parameters\":{\"abdjuljgxotu\":{\"type\":\"Int\",\"defaultValue\":\"databfsin\"},\"gspxldlnoc\":{\"type\":\"Object\",\"defaultValue\":\"datakiyaosthulzugi\"},\"wm\":{\"type\":\"Array\",\"defaultValue\":\"datagimizlukj\"},\"jtryjskdiylgzzuq\":{\"type\":\"Object\",\"defaultValue\":\"datarlmspppoeszt\"}},\"annotations\":[\"datasybqowgvmxwbo\",\"dataxdhkoex\"],\"folder\":{\"name\":\"gnaka\"},\"\":{\"ne\":\"datac\",\"yhmgq\":\"datavlumqeumz\"}}") - .toObject(DynamicsAXResourceDataset.class); - Assertions.assertEquals("qrzvuxnx", model.description()); - Assertions.assertEquals("rivptbczsu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("abdjuljgxotu").type()); - Assertions.assertEquals("gnaka", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsAXResourceDataset model = new DynamicsAXResourceDataset().withDescription("qrzvuxnx") - .withStructure("dataohshzultdb") - .withSchema("datajdhypngocbdx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("rivptbczsu") - .withParameters(mapOf("maatvogpyc", "datazukekytkzvt", "hbdxsbypl", "datainha", "sezsggdp", - "datakhwfjudapbq", "iasfzrguz", "datatfcbrtsrdplqdyza"))) - .withParameters(mapOf("abdjuljgxotu", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databfsin"), "gspxldlnoc", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakiyaosthulzugi"), - "wm", new 
ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagimizlukj"), - "jtryjskdiylgzzuq", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datarlmspppoeszt"))) - .withAnnotations(Arrays.asList("datasybqowgvmxwbo", "dataxdhkoex")) - .withFolder(new DatasetFolder().withName("gnaka")) - .withPath("dataz"); - model = BinaryData.fromObject(model).toObject(DynamicsAXResourceDataset.class); - Assertions.assertEquals("qrzvuxnx", model.description()); - Assertions.assertEquals("rivptbczsu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("abdjuljgxotu").type()); - Assertions.assertEquals("gnaka", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java deleted file mode 100644 index 15bcdb943c15..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DynamicsAXResourceDatasetTypeProperties; - -public final class DynamicsAXResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsAXResourceDatasetTypeProperties model = BinaryData.fromString("{\"path\":\"dataeivjqutxr\"}") - .toObject(DynamicsAXResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsAXResourceDatasetTypeProperties model - = new DynamicsAXResourceDatasetTypeProperties().withPath("dataeivjqutxr"); - model = BinaryData.fromObject(model).toObject(DynamicsAXResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java deleted file mode 100644 index cfb51dc75252..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsAXSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DynamicsAXSource; - -public final class DynamicsAXSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsAXSource model = BinaryData.fromString( - "{\"type\":\"DynamicsAXSource\",\"query\":\"datadmwa\",\"httpRequestTimeout\":\"datapbuqkdieuopwsa\",\"queryTimeout\":\"datahmizcfk\",\"additionalColumns\":\"datafmoonnria\",\"sourceRetryCount\":\"dataygzkdbmjzobc\",\"sourceRetryWait\":\"databbuuipelokptteo\",\"maxConcurrentConnections\":\"datahwgj\",\"disableMetricsCollection\":\"datax\",\"\":{\"hslnq\":\"datawjgkxvkj\",\"vwqiwg\":\"datamwwtzx\"}}") - .toObject(DynamicsAXSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsAXSource model = new DynamicsAXSource().withSourceRetryCount("dataygzkdbmjzobc") - .withSourceRetryWait("databbuuipelokptteo") - .withMaxConcurrentConnections("datahwgj") - .withDisableMetricsCollection("datax") - .withQueryTimeout("datahmizcfk") - .withAdditionalColumns("datafmoonnria") - .withQuery("datadmwa") - .withHttpRequestTimeout("datapbuqkdieuopwsa"); - model = BinaryData.fromObject(model).toObject(DynamicsAXSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java deleted file mode 100644 index 3627b8062196..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DynamicsCrmEntityDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DynamicsCrmEntityDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsCrmEntityDataset model = BinaryData.fromString( - "{\"type\":\"DynamicsCrmEntity\",\"typeProperties\":{\"entityName\":\"datakcbkfukdlj\"},\"description\":\"vtsdydshkpafy\",\"structure\":\"dataoowwziz\",\"schema\":\"datayuf\",\"linkedServiceName\":{\"referenceName\":\"qz\",\"parameters\":{\"ndjokgwesymzqh\":\"datafgufyjfdkqiy\",\"ujqbbgsimwejl\":\"dataqpfzlpejtznxlue\",\"bcpr\":\"databkbpjzobd\",\"ydtnaczkfwfatgaw\":\"dataswku\"}},\"parameters\":{\"mecsaa\":{\"type\":\"Bool\",\"defaultValue\":\"dataivdwgtqc\"},\"hzhrbkhtm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataqbdiuycsbskowkr\"},\"ucyhfaimq\":{\"type\":\"Bool\",\"defaultValue\":\"dataiuasfgq\"}},\"annotations\":[\"datauozkgyf\",\"dataeoehgfmqmskkixvl\",\"datajxplh\"],\"folder\":{\"name\":\"asyntv\"},\"\":{\"bqvknmpecqxgiq\":\"dataielbqrv\",\"stlpwqp\":\"datasifubns\"}}") - .toObject(DynamicsCrmEntityDataset.class); - Assertions.assertEquals("vtsdydshkpafy", model.description()); - Assertions.assertEquals("qz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("mecsaa").type()); - Assertions.assertEquals("asyntv", model.folder().name()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsCrmEntityDataset model = new DynamicsCrmEntityDataset().withDescription("vtsdydshkpafy") - .withStructure("dataoowwziz") - .withSchema("datayuf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qz") - .withParameters(mapOf("ndjokgwesymzqh", "datafgufyjfdkqiy", "ujqbbgsimwejl", "dataqpfzlpejtznxlue", - "bcpr", "databkbpjzobd", "ydtnaczkfwfatgaw", "dataswku"))) - .withParameters(mapOf("mecsaa", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataivdwgtqc"), "hzhrbkhtm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataqbdiuycsbskowkr"), - "ucyhfaimq", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataiuasfgq"))) - .withAnnotations(Arrays.asList("datauozkgyf", "dataeoehgfmqmskkixvl", "datajxplh")) - .withFolder(new DatasetFolder().withName("asyntv")) - .withEntityName("datakcbkfukdlj"); - model = BinaryData.fromObject(model).toObject(DynamicsCrmEntityDataset.class); - Assertions.assertEquals("vtsdydshkpafy", model.description()); - Assertions.assertEquals("qz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("mecsaa").type()); - Assertions.assertEquals("asyntv", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java deleted file mode 100644 index 19383d33fef8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmEntityDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DynamicsCrmEntityDatasetTypeProperties; - -public final class DynamicsCrmEntityDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsCrmEntityDatasetTypeProperties model = BinaryData.fromString("{\"entityName\":\"dataxjkhtupsv\"}") - .toObject(DynamicsCrmEntityDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsCrmEntityDatasetTypeProperties model - = new DynamicsCrmEntityDatasetTypeProperties().withEntityName("dataxjkhtupsv"); - model = BinaryData.fromObject(model).toObject(DynamicsCrmEntityDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java deleted file mode 100644 index a8030c2a57b0..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsCrmSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DynamicsCrmSource; - -public final class DynamicsCrmSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsCrmSource model = BinaryData.fromString( - "{\"type\":\"DynamicsCrmSource\",\"query\":\"databomjby\",\"additionalColumns\":\"dataprkbzraljwfnc\",\"sourceRetryCount\":\"dataaylcpgzmx\",\"sourceRetryWait\":\"datappqajdm\",\"maxConcurrentConnections\":\"datanntqqgu\",\"disableMetricsCollection\":\"datanwrzimin\",\"\":{\"dlclxxquyff\":\"datazfwfuxdtpjcsqk\",\"b\":\"dataqombdsgqxacidu\"}}") - .toObject(DynamicsCrmSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsCrmSource model = new DynamicsCrmSource().withSourceRetryCount("dataaylcpgzmx") - .withSourceRetryWait("datappqajdm") - .withMaxConcurrentConnections("datanntqqgu") - .withDisableMetricsCollection("datanwrzimin") - .withQuery("databomjby") - .withAdditionalColumns("dataprkbzraljwfnc"); - model = BinaryData.fromObject(model).toObject(DynamicsCrmSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java deleted file mode 100644 index 312fcc40e597..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DynamicsEntityDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class DynamicsEntityDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsEntityDataset model = BinaryData.fromString( - 
"{\"type\":\"DynamicsEntity\",\"typeProperties\":{\"entityName\":\"datacucrcm\"},\"description\":\"ixpqj\",\"structure\":\"dataifhb\",\"schema\":\"dataldtt\",\"linkedServiceName\":{\"referenceName\":\"hqclnaihtgs\",\"parameters\":{\"mtrawrqkza\":\"datakvllrhtpmglxkoi\",\"paklw\":\"databun\",\"zltkaszfj\":\"datalweeprne\"}},\"parameters\":{\"pg\":{\"type\":\"Object\",\"defaultValue\":\"datadux\"},\"nwsb\":{\"type\":\"Float\",\"defaultValue\":\"dataebmvrdjom\"},\"esdfedsb\":{\"type\":\"SecureString\",\"defaultValue\":\"datalsflxk\"},\"i\":{\"type\":\"Object\",\"defaultValue\":\"datancoinmphymc\"}},\"annotations\":[\"datatvdhqnufbxwe\",\"datai\",\"datantojovfnybydhuih\"],\"folder\":{\"name\":\"wud\"},\"\":{\"mosqircamqprlob\":\"dataorhjkehwv\",\"gelajdyolje\":\"dataugejcvjkjyczcmt\",\"jkjymgqbgcx\":\"datayxqfmzsizzhravr\",\"lermkmerg\":\"datanqxgz\"}}") - .toObject(DynamicsEntityDataset.class); - Assertions.assertEquals("ixpqj", model.description()); - Assertions.assertEquals("hqclnaihtgs", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("pg").type()); - Assertions.assertEquals("wud", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsEntityDataset model = new DynamicsEntityDataset().withDescription("ixpqj") - .withStructure("dataifhb") - .withSchema("dataldtt") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hqclnaihtgs") - .withParameters( - mapOf("mtrawrqkza", "datakvllrhtpmglxkoi", "paklw", "databun", "zltkaszfj", "datalweeprne"))) - .withParameters(mapOf("pg", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datadux"), "nwsb", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataebmvrdjom"), - "esdfedsb", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datalsflxk"), "i", - new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datancoinmphymc"))) - .withAnnotations(Arrays.asList("datatvdhqnufbxwe", "datai", "datantojovfnybydhuih")) - .withFolder(new DatasetFolder().withName("wud")) - .withEntityName("datacucrcm"); - model = BinaryData.fromObject(model).toObject(DynamicsEntityDataset.class); - Assertions.assertEquals("ixpqj", model.description()); - Assertions.assertEquals("hqclnaihtgs", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("pg").type()); - Assertions.assertEquals("wud", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java deleted file mode 100644 index 84f2d96f3e66..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsEntityDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DynamicsEntityDatasetTypeProperties; - -public final class DynamicsEntityDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsEntityDatasetTypeProperties model = BinaryData.fromString("{\"entityName\":\"datakirhn\"}") - .toObject(DynamicsEntityDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsEntityDatasetTypeProperties model - = new DynamicsEntityDatasetTypeProperties().withEntityName("datakirhn"); - model = BinaryData.fromObject(model).toObject(DynamicsEntityDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java deleted file mode 100644 index 826ec179b5e2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/DynamicsSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DynamicsSource; - -public final class DynamicsSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - DynamicsSource model = BinaryData.fromString( - "{\"type\":\"DynamicsSource\",\"query\":\"databqxlsam\",\"additionalColumns\":\"datajqh\",\"sourceRetryCount\":\"dataojnbbbgvmowy\",\"sourceRetryWait\":\"dataqhuhmldhnzsc\",\"maxConcurrentConnections\":\"datauzuchotdz\",\"disableMetricsCollection\":\"datahqhwpuaermaww\",\"\":{\"qcemco\":\"datada\",\"hisxz\":\"datawfuo\",\"oj\":\"dataikvdfszxbupsx\",\"zlmwfncwlwov\":\"datagxcgqkhyvtajwkrx\"}}") - .toObject(DynamicsSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - DynamicsSource model = new DynamicsSource().withSourceRetryCount("dataojnbbbgvmowy") - .withSourceRetryWait("dataqhuhmldhnzsc") - .withMaxConcurrentConnections("datauzuchotdz") - .withDisableMetricsCollection("datahqhwpuaermaww") - .withQuery("databqxlsam") - .withAdditionalColumns("datajqh"); - model = BinaryData.fromObject(model).toObject(DynamicsSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java deleted file mode 100644 index e1a679508537..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaObjectDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.EloquaObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class EloquaObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EloquaObjectDataset model = BinaryData.fromString( - "{\"type\":\"EloquaObject\",\"typeProperties\":{\"tableName\":\"dataltqmmij\"},\"description\":\"qfkwnaeikczscymq\",\"structure\":\"dataxgwpq\",\"schema\":\"dataumz\",\"linkedServiceName\":{\"referenceName\":\"pd\",\"parameters\":{\"zk\":\"datazvp\"}},\"parameters\":{\"wed\":{\"type\":\"Bool\",\"defaultValue\":\"datazbflbqmhbiyxx\"},\"lmsy\":{\"type\":\"SecureString\",\"defaultValue\":\"dataqbbseseayu\"},\"zk\":{\"type\":\"String\",\"defaultValue\":\"datacrolrzesbomp\"}},\"annotations\":[\"datanwjivtbusz\"],\"folder\":{\"name\":\"rdf\"},\"\":{\"sdeqngcaydzinlo\":\"dataywdal\",\"xrsi\":\"dataulpozmdahyc\"}}") - .toObject(EloquaObjectDataset.class); - Assertions.assertEquals("qfkwnaeikczscymq", model.description()); - Assertions.assertEquals("pd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("wed").type()); - Assertions.assertEquals("rdf", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EloquaObjectDataset model = new EloquaObjectDataset().withDescription("qfkwnaeikczscymq") - .withStructure("dataxgwpq") - .withSchema("dataumz") - .withLinkedServiceName( - new 
LinkedServiceReference().withReferenceName("pd").withParameters(mapOf("zk", "datazvp"))) - .withParameters(mapOf("wed", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datazbflbqmhbiyxx"), "lmsy", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataqbbseseayu"), - "zk", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datacrolrzesbomp"))) - .withAnnotations(Arrays.asList("datanwjivtbusz")) - .withFolder(new DatasetFolder().withName("rdf")) - .withTableName("dataltqmmij"); - model = BinaryData.fromObject(model).toObject(EloquaObjectDataset.class); - Assertions.assertEquals("qfkwnaeikczscymq", model.description()); - Assertions.assertEquals("pd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("wed").type()); - Assertions.assertEquals("rdf", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java deleted file mode 100644 index 52de1094014e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EloquaSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.EloquaSource; - -public final class EloquaSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EloquaSource model = BinaryData.fromString( - "{\"type\":\"EloquaSource\",\"query\":\"dataxamxi\",\"queryTimeout\":\"datar\",\"additionalColumns\":\"datakglynbqpeoj\",\"sourceRetryCount\":\"databoggw\",\"sourceRetryWait\":\"datahtnywgtsodnxeir\",\"maxConcurrentConnections\":\"datawjimcfrhtzgduvoa\",\"disableMetricsCollection\":\"dataocalptfpbzyqb\",\"\":{\"xtbrqnyurxl\":\"datacyramvzu\",\"gx\":\"datauwxslzq\",\"urtnwbjjy\":\"databn\",\"aaoofltbsay\":\"dataupckhfbmdemohlsh\"}}") - .toObject(EloquaSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EloquaSource model = new EloquaSource().withSourceRetryCount("databoggw") - .withSourceRetryWait("datahtnywgtsodnxeir") - .withMaxConcurrentConnections("datawjimcfrhtzgduvoa") - .withDisableMetricsCollection("dataocalptfpbzyqb") - .withQueryTimeout("datar") - .withAdditionalColumns("datakglynbqpeoj") - .withQuery("dataxamxi"); - model = BinaryData.fromObject(model).toObject(EloquaSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java deleted file mode 100644 index a3fb0d02138d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EntityReferenceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.EntityReference; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeEntityReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class EntityReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EntityReference model - = BinaryData.fromString("{\"type\":\"LinkedServiceReference\",\"referenceName\":\"ggkztzttjn\"}") - .toObject(EntityReference.class); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, model.type()); - Assertions.assertEquals("ggkztzttjn", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EntityReference model - = new EntityReference().withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE) - .withReferenceName("ggkztzttjn"); - model = BinaryData.fromObject(model).toObject(EntityReference.class); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, model.type()); - Assertions.assertEquals("ggkztzttjn", model.referenceName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java deleted file mode 100644 index 11ef0e08b4b1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.EnvironmentVariableSetup; -import org.junit.jupiter.api.Assertions; - -public final class EnvironmentVariableSetupTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EnvironmentVariableSetup model = BinaryData.fromString( - "{\"type\":\"EnvironmentVariableSetup\",\"typeProperties\":{\"variableName\":\"ekbirhyvsyuv\",\"variableValue\":\"iemorszffiukltr\"}}") - .toObject(EnvironmentVariableSetup.class); - Assertions.assertEquals("ekbirhyvsyuv", model.variableName()); - Assertions.assertEquals("iemorszffiukltr", model.variableValue()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EnvironmentVariableSetup model - = new EnvironmentVariableSetup().withVariableName("ekbirhyvsyuv").withVariableValue("iemorszffiukltr"); - model = BinaryData.fromObject(model).toObject(EnvironmentVariableSetup.class); - Assertions.assertEquals("ekbirhyvsyuv", model.variableName()); - Assertions.assertEquals("iemorszffiukltr", model.variableValue()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java deleted file mode 100644 index 18805e5e6c55..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/EnvironmentVariableSetupTypePropertiesTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.EnvironmentVariableSetupTypeProperties; -import org.junit.jupiter.api.Assertions; - -public final class EnvironmentVariableSetupTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EnvironmentVariableSetupTypeProperties model - = BinaryData.fromString("{\"variableName\":\"gio\",\"variableValue\":\"uoxcsd\"}") - .toObject(EnvironmentVariableSetupTypeProperties.class); - Assertions.assertEquals("gio", model.variableName()); - Assertions.assertEquals("uoxcsd", model.variableValue()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EnvironmentVariableSetupTypeProperties model - = new EnvironmentVariableSetupTypeProperties().withVariableName("gio").withVariableValue("uoxcsd"); - model = BinaryData.fromObject(model).toObject(EnvironmentVariableSetupTypeProperties.class); - Assertions.assertEquals("gio", model.variableName()); - Assertions.assertEquals("uoxcsd", model.variableValue()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java deleted file mode 100644 index 48916347ceae..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.azure.resourcemanager.datafactory.models.ExcelDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ExcelDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExcelDataset model = BinaryData.fromString( - "{\"type\":\"Excel\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datal\",\"fileName\":\"datalhhfi\",\"\":{\"yldqpzfzxsox\":\"datafculzjrmhpfyw\",\"cqsxytqqtcmiw\":\"datanunjlzkdr\"}},\"sheetName\":\"datais\",\"sheetIndex\":\"datamey\",\"range\":\"datajamca\",\"firstRowAsHeader\":\"datahftpzcrrykll\",\"compression\":{\"type\":\"datanqanhkigglclwal\",\"level\":\"dataubhg\",\"\":{\"alec\":\"dataetxdqcmyctajqzj\",\"egyxsbfpzvoik\":\"databibiwks\",\"pblalhhez\":\"datantwczfzwushlc\",\"qdsgptotxj\":\"datafkissaidqzsaa\"}},\"nullValue\":\"dataia\"},\"description\":\"nlrtbfijzz\",\"structure\":\"datao\",\"schema\":\"dataolbuauktwieope\",\"linkedServiceName\":{\"referenceName\":\"e\",\"parameters\":{\"kgxyxyauxre\":\"datadwrswyiljpi\"}},\"parameters\":{\"wytkujsqy\":{\"type\":\"SecureString\",\"defaultValue\":\"datacnlt\"}},\"annotations\":[\"dataoxfab\",\"dataqgpwbmwhr\"],\"folder\":{\"name\":\"iwrycgnwplrrb\"},\"\":{\"hqvumspbfs\":\"datatsbbibtic\",\"xmzrmtmvwitu\":\"dataeqbbewfcuqfpy\"}}") - .toObject(ExcelDataset.class); - Assertions.assertEquals("nlrtbfijzz", 
model.description()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("wytkujsqy").type()); - Assertions.assertEquals("iwrycgnwplrrb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExcelDataset model = new ExcelDataset().withDescription("nlrtbfijzz") - .withStructure("datao") - .withSchema("dataolbuauktwieope") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("kgxyxyauxre", "datadwrswyiljpi"))) - .withParameters(mapOf("wytkujsqy", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacnlt"))) - .withAnnotations(Arrays.asList("dataoxfab", "dataqgpwbmwhr")) - .withFolder(new DatasetFolder().withName("iwrycgnwplrrb")) - .withLocation(new DatasetLocation().withFolderPath("datal") - .withFileName("datalhhfi") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withSheetName("datais") - .withSheetIndex("datamey") - .withRange("datajamca") - .withFirstRowAsHeader("datahftpzcrrykll") - .withCompression(new DatasetCompression().withType("datanqanhkigglclwal") - .withLevel("dataubhg") - .withAdditionalProperties(mapOf())) - .withNullValue("dataia"); - model = BinaryData.fromObject(model).toObject(ExcelDataset.class); - Assertions.assertEquals("nlrtbfijzz", model.description()); - Assertions.assertEquals("e", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("wytkujsqy").type()); - Assertions.assertEquals("iwrycgnwplrrb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java deleted file mode 100644 index 4968ca7c0463..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelDatasetTypePropertiesTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ExcelDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import java.util.HashMap; -import java.util.Map; - -public final class ExcelDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExcelDatasetTypeProperties model = BinaryData.fromString( - 
"{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datayyjshcybwfuppo\",\"fileName\":\"datarcmvouujx\",\"\":{\"vlrxikjmoywlun\":\"datakmoxrezsv\"}},\"sheetName\":\"datapcwybsz\",\"sheetIndex\":\"datacnpatpft\",\"range\":\"dataebwf\",\"firstRowAsHeader\":\"dataxorpwal\",\"compression\":{\"type\":\"datawugexojfccyl\",\"level\":\"datarh\",\"\":{\"zzxezmnrkj\":\"dataazjpwexcdrzpro\",\"lokfpmijpdvzv\":\"datapjeuxsp\",\"rwyambhbafebzxfk\":\"databhwbdqufvcgnrgla\"}},\"nullValue\":\"datautibhlen\"}") - .toObject(ExcelDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExcelDatasetTypeProperties model = new ExcelDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datayyjshcybwfuppo") - .withFileName("datarcmvouujx") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withSheetName("datapcwybsz") - .withSheetIndex("datacnpatpft") - .withRange("dataebwf") - .withFirstRowAsHeader("dataxorpwal") - .withCompression(new DatasetCompression().withType("datawugexojfccyl") - .withLevel("datarh") - .withAdditionalProperties(mapOf())) - .withNullValue("datautibhlen"); - model = BinaryData.fromObject(model).toObject(ExcelDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java deleted file mode 100644 index 7375ea28ebaa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExcelSourceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExcelSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class ExcelSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExcelSource model = BinaryData.fromString( - "{\"type\":\"ExcelSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datapx\",\"disableMetricsCollection\":\"datayhivh\",\"\":{\"brdnovuduwwjo\":\"dataqxyfbkazylay\"}},\"additionalColumns\":\"datacnrkmcivhwwhy\",\"sourceRetryCount\":\"datahwbdfcfyzw\",\"sourceRetryWait\":\"datarjfsq\",\"maxConcurrentConnections\":\"datafqaqoveqowqod\",\"disableMetricsCollection\":\"datapasxwiic\",\"\":{\"dow\":\"datajhhad\"}}") - .toObject(ExcelSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExcelSource model = new ExcelSource().withSourceRetryCount("datahwbdfcfyzw") - 
.withSourceRetryWait("datarjfsq") - .withMaxConcurrentConnections("datafqaqoveqowqod") - .withDisableMetricsCollection("datapasxwiic") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datapx") - .withDisableMetricsCollection("datayhivh") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("datacnrkmcivhwwhy"); - model = BinaryData.fromObject(model).toObject(ExcelSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java deleted file mode 100644 index dbc3a1c7d955..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecuteDataFlowActivityTypePropertiesComputeTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute; - -public final class ExecuteDataFlowActivityTypePropertiesComputeTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExecuteDataFlowActivityTypePropertiesCompute model - = BinaryData.fromString("{\"computeType\":\"dataxe\",\"coreCount\":\"datahvccxuntghwcb\"}") - .toObject(ExecuteDataFlowActivityTypePropertiesCompute.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExecuteDataFlowActivityTypePropertiesCompute model - = new ExecuteDataFlowActivityTypePropertiesCompute().withComputeType("dataxe") - .withCoreCount("datahvccxuntghwcb"); - model = BinaryData.fromObject(model).toObject(ExecuteDataFlowActivityTypePropertiesCompute.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java deleted file mode 100644 index a90f009bede5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityPolicyTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExecutePipelineActivityPolicy; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ExecutePipelineActivityPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExecutePipelineActivityPolicy model = BinaryData.fromString( - "{\"secureInput\":true,\"\":{\"j\":\"datahy\",\"bjsvuqkbs\":\"datamxbghxiotlf\",\"ugdyfyjeex\":\"dataicoaysargqkgaus\"}}") - .toObject(ExecutePipelineActivityPolicy.class); - Assertions.assertEquals(true, model.secureInput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExecutePipelineActivityPolicy model - = new ExecutePipelineActivityPolicy().withSecureInput(true).withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(ExecutePipelineActivityPolicy.class); - Assertions.assertEquals(true, model.secureInput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java deleted file mode 100644 index 2abc806e228d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTests.java +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.ExecutePipelineActivity; -import com.azure.resourcemanager.datafactory.models.ExecutePipelineActivityPolicy; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ExecutePipelineActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExecutePipelineActivity model = BinaryData.fromString( - 
"{\"type\":\"ExecutePipeline\",\"policy\":{\"secureInput\":false,\"\":{\"fxx\":\"datawf\"}},\"typeProperties\":{\"pipeline\":{\"referenceName\":\"oyisk\",\"name\":\"asxak\"},\"parameters\":{\"kgccldxuweweeeg\":\"datawjtnfaum\",\"hbcrib\":\"datazwsnryfaprhfc\",\"nfrhbkn\":\"datadszuxhaqlywty\",\"xhfg\":\"dataagpnmcqud\"},\"waitOnCompletion\":false},\"name\":\"egm\",\"description\":\"ebzoujhijlduuvxk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ordzwbskf\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Completed\",\"Succeeded\"],\"\":{\"pmowlsrxytev\":\"dataojzlia\",\"nzzhyl\":\"dataqxpmfhehtrpql\"}},{\"activity\":\"zuxqqrmck\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"zg\":\"dataeutbym\"}},{\"activity\":\"zhbnbnjpiecnriv\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\"],\"\":{\"uvpcjyh\":\"datahsuhkik\",\"anrirrnqloomsy\":\"dataznmj\",\"gdvknqui\":\"datayqgaska\",\"jzborwzpfgks\":\"dataipgvfchzcpv\"}},{\"activity\":\"vbrxlsbgl\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Completed\",\"Failed\"],\"\":{\"cabsmrfx\":\"dataneopxdbhcfswpdar\"}}],\"userProperties\":[{\"name\":\"vzgwvmhbiziij\",\"value\":\"dataseexdboatvsfyxdf\"},{\"name\":\"qrnawnqy\",\"value\":\"datagfxacojca\"},{\"name\":\"axor\",\"value\":\"dataj\"},{\"name\":\"oyngxogqvwchyn\",\"value\":\"datadtnaptwmawypk\"}],\"\":{\"gvvizaygtbmluyyc\":\"dataidoqvcjspjpmt\",\"uijpay\":\"datazvqpnjqpwxf\",\"i\":\"datalnz\",\"n\":\"dataqopwmbdleegwlh\"}}") - .toObject(ExecutePipelineActivity.class); - Assertions.assertEquals("egm", model.name()); - Assertions.assertEquals("ebzoujhijlduuvxk", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ordzwbskf", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, 
model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vzgwvmhbiziij", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals("oyisk", model.pipeline().referenceName()); - Assertions.assertEquals("asxak", model.pipeline().name()); - Assertions.assertEquals(false, model.waitOnCompletion()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExecutePipelineActivity model = new ExecutePipelineActivity().withName("egm") - .withDescription("ebzoujhijlduuvxk") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ordzwbskf") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zuxqqrmck") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zhbnbnjpiecnriv") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vbrxlsbgl") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("vzgwvmhbiziij").withValue("dataseexdboatvsfyxdf"), - new UserProperty().withName("qrnawnqy").withValue("datagfxacojca"), - new UserProperty().withName("axor").withValue("dataj"), - new UserProperty().withName("oyngxogqvwchyn").withValue("datadtnaptwmawypk"))) - .withPolicy(new 
ExecutePipelineActivityPolicy().withSecureInput(false).withAdditionalProperties(mapOf())) - .withPipeline(new PipelineReference().withReferenceName("oyisk").withName("asxak")) - .withParameters(mapOf("kgccldxuweweeeg", "datawjtnfaum", "hbcrib", "datazwsnryfaprhfc", "nfrhbkn", - "datadszuxhaqlywty", "xhfg", "dataagpnmcqud")) - .withWaitOnCompletion(false); - model = BinaryData.fromObject(model).toObject(ExecutePipelineActivity.class); - Assertions.assertEquals("egm", model.name()); - Assertions.assertEquals("ebzoujhijlduuvxk", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ordzwbskf", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vzgwvmhbiziij", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals("oyisk", model.pipeline().referenceName()); - Assertions.assertEquals("asxak", model.pipeline().name()); - Assertions.assertEquals(false, model.waitOnCompletion()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java deleted file mode 100644 index ddc520ba41e7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutePipelineActivityTypePropertiesTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ExecutePipelineActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ExecutePipelineActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExecutePipelineActivityTypeProperties model = BinaryData.fromString( - "{\"pipeline\":{\"referenceName\":\"lkxvfejdgoj\",\"name\":\"qez\"},\"parameters\":{\"s\":\"datavayyyowjpsmnxcc\",\"a\":\"datahlokhmkqy\",\"lmwzkxaglwd\":\"dataddwfhfjfato\",\"hvioccszdaxafu\":\"datatjfnmxzu\"},\"waitOnCompletion\":false}") - .toObject(ExecutePipelineActivityTypeProperties.class); - Assertions.assertEquals("lkxvfejdgoj", model.pipeline().referenceName()); - Assertions.assertEquals("qez", model.pipeline().name()); - 
Assertions.assertEquals(false, model.waitOnCompletion()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExecutePipelineActivityTypeProperties model = new ExecutePipelineActivityTypeProperties() - .withPipeline(new PipelineReference().withReferenceName("lkxvfejdgoj").withName("qez")) - .withParameters(mapOf("s", "datavayyyowjpsmnxcc", "a", "datahlokhmkqy", "lmwzkxaglwd", "dataddwfhfjfato", - "hvioccszdaxafu", "datatjfnmxzu")) - .withWaitOnCompletion(false); - model = BinaryData.fromObject(model).toObject(ExecutePipelineActivityTypeProperties.class); - Assertions.assertEquals("lkxvfejdgoj", model.pipeline().referenceName()); - Assertions.assertEquals("qez", model.pipeline().name()); - Assertions.assertEquals(false, model.waitOnCompletion()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java deleted file mode 100644 index a7f71947d422..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExecutionActivityTests.java +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.ExecutionActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ExecutionActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExecutionActivity model = BinaryData.fromString( - "{\"type\":\"Execution\",\"linkedServiceName\":{\"referenceName\":\"nvfcdsi\",\"parameters\":{\"x\":\"datanybnt\"}},\"policy\":{\"timeout\":\"dataxujzox\",\"retry\":\"datakqtkrjuc\",\"retryIntervalInSeconds\":2028416935,\"secureInput\":true,\"secureOutput\":true,\"\":{\"cem\":\"datalx\",\"mmgb\":\"dataoanudvqannenxgo\",\"wfxjiqpd\":\"dataqyfgzbfpqifsgzfg\"}},\"name\":\"ohwnnayo\",\"description\":\"hjxhpppnksqiwl\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"eohoygowdzizzu\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"sepiplpexhh\":\"datappqvovsirt\",\"tl\":\"dataj\",\"biroxamnvrcq\":\"datap\",\"tzmmxdmppibv\":\"datapbainsihdzxeufjy\"}},{\"activity\":\"qjcphofyoqfvm\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"tyxlsycckj\":\"datamuzltnneolghgte\",\"c\":\"dataqdtykcaocjuuxqnk\",\"ynjcvewl\":\"datamrwc\",\"pigla\":\"datajrnaktj\"}},{\"activity\":\"heqjc\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"edvbrbdujpsh\":\"datacztrqnuahh\"}}],\"userPr
operties\":[{\"name\":\"ivrycyulwzjorv\",\"value\":\"datagmkkeles\"},{\"name\":\"jckyocyb\",\"value\":\"datalwdmpsdfsla\"},{\"name\":\"ktnhyylipvqpqu\",\"value\":\"dataexnpoaeruywfh\"},{\"name\":\"twlzlmpiprlcgq\",\"value\":\"datawmn\"}],\"\":{\"kpapepwpbnj\":\"dataqlybmkmxus\",\"tun\":\"datako\",\"vnddllbwnmv\":\"dataumy\"}}") - .toObject(ExecutionActivity.class); - Assertions.assertEquals("ohwnnayo", model.name()); - Assertions.assertEquals("hjxhpppnksqiwl", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("eohoygowdzizzu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ivrycyulwzjorv", model.userProperties().get(0).name()); - Assertions.assertEquals("nvfcdsi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2028416935, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExecutionActivity model = new ExecutionActivity().withName("ohwnnayo") - .withDescription("hjxhpppnksqiwl") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("eohoygowdzizzu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qjcphofyoqfvm") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("heqjc") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, 
DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ivrycyulwzjorv").withValue("datagmkkeles"), - new UserProperty().withName("jckyocyb").withValue("datalwdmpsdfsla"), - new UserProperty().withName("ktnhyylipvqpqu").withValue("dataexnpoaeruywfh"), - new UserProperty().withName("twlzlmpiprlcgq").withValue("datawmn"))) - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("nvfcdsi").withParameters(mapOf("x", "datanybnt"))) - .withPolicy(new ActivityPolicy().withTimeout("dataxujzox") - .withRetry("datakqtkrjuc") - .withRetryIntervalInSeconds(2028416935) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(ExecutionActivity.class); - Assertions.assertEquals("ohwnnayo", model.name()); - Assertions.assertEquals("hjxhpppnksqiwl", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("eohoygowdzizzu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ivrycyulwzjorv", model.userProperties().get(0).name()); - Assertions.assertEquals("nvfcdsi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(2028416935, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java deleted file mode 100644 index c9a946fc0d3c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExportSettingsTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExportSettings; -import java.util.HashMap; -import java.util.Map; - -public final class ExportSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExportSettings model = BinaryData.fromString( - "{\"type\":\"ExportSettings\",\"\":{\"tfscoupsfeywbhxh\":\"dataw\",\"wjjotfunsd\":\"datawkwcfkcqexdwecvk\",\"fuobx\":\"dataejxvrwalekqed\"}}") - .toObject(ExportSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExportSettings model = new ExportSettings().withAdditionalProperties(mapOf("type", "ExportSettings")); - model = BinaryData.fromObject(model).toObject(ExportSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java deleted file mode 100644 index d84f84957610..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchRequestTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest; -import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ExposureControlBatchRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExposureControlBatchRequest model = BinaryData - .fromString("{\"exposureControlRequests\":[{\"featureName\":\"fbuhfmvfaxkffe\",\"featureType\":\"th\"}]}") - .toObject(ExposureControlBatchRequest.class); - Assertions.assertEquals("fbuhfmvfaxkffe", model.exposureControlRequests().get(0).featureName()); - Assertions.assertEquals("th", model.exposureControlRequests().get(0).featureType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExposureControlBatchRequest model = new ExposureControlBatchRequest().withExposureControlRequests( - Arrays.asList(new 
ExposureControlRequest().withFeatureName("fbuhfmvfaxkffe").withFeatureType("th"))); - model = BinaryData.fromObject(model).toObject(ExposureControlBatchRequest.class); - Assertions.assertEquals("fbuhfmvfaxkffe", model.exposureControlRequests().get(0).featureName()); - Assertions.assertEquals("th", model.exposureControlRequests().get(0).featureType()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java deleted file mode 100644 index c5db19883a28..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlBatchResponseInnerTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ExposureControlBatchResponseInner; -import com.azure.resourcemanager.datafactory.fluent.models.ExposureControlResponseInner; -import java.util.Arrays; - -public final class ExposureControlBatchResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExposureControlBatchResponseInner model = BinaryData.fromString( - "{\"exposureControlResponses\":[{\"featureName\":\"ez\",\"value\":\"shxmzsbbzoggigrx\"},{\"featureName\":\"ur\",\"value\":\"xxjnspydptk\"}]}") - .toObject(ExposureControlBatchResponseInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExposureControlBatchResponseInner model = new ExposureControlBatchResponseInner().withExposureControlResponses( - Arrays.asList(new ExposureControlResponseInner(), new ExposureControlResponseInner())); - model = BinaryData.fromObject(model).toObject(ExposureControlBatchResponseInner.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java deleted file mode 100644 index a33d034da583..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlRequestTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import org.junit.jupiter.api.Assertions; - -public final class ExposureControlRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExposureControlRequest model - = BinaryData.fromString("{\"featureName\":\"mvxi\",\"featureType\":\"uugidyjrrfby\"}") - .toObject(ExposureControlRequest.class); - Assertions.assertEquals("mvxi", model.featureName()); - Assertions.assertEquals("uugidyjrrfby", model.featureType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExposureControlRequest model - = new ExposureControlRequest().withFeatureName("mvxi").withFeatureType("uugidyjrrfby"); - model = BinaryData.fromObject(model).toObject(ExposureControlRequest.class); - Assertions.assertEquals("mvxi", model.featureName()); - Assertions.assertEquals("uugidyjrrfby", model.featureType()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java deleted file mode 100644 index dec1e0d826d3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlResponseInnerTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ExposureControlResponseInner; - -public final class ExposureControlResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExposureControlResponseInner model - = BinaryData.fromString("{\"featureName\":\"svexcsonpclhoco\",\"value\":\"lkevle\"}") - .toObject(ExposureControlResponseInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExposureControlResponseInner model = new ExposureControlResponseInner(); - model = BinaryData.fromObject(model).toObject(ExposureControlResponseInner.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java deleted file mode 100644 index 368603d381e4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueByFactoryWithResponseMockTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import com.azure.resourcemanager.datafactory.models.ExposureControlResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ExposureControlsGetFeatureValueByFactoryWithResponseMockTests { - @Test - public void testGetFeatureValueByFactoryWithResponse() throws Exception { - String responseStr = "{\"featureName\":\"zub\",\"value\":\"uimincc\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ExposureControlResponse response = manager.exposureControls() - .getFeatureValueByFactoryWithResponse("aufabtpcbnt", "jnkvs", - new ExposureControlRequest().withFeatureName("iphllw").withFeatureType("dzodvzbxtzgxdxqe"), - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java deleted file mode 100644 index 
3098404cbdfb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsGetFeatureValueWithResponseMockTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import com.azure.resourcemanager.datafactory.models.ExposureControlResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ExposureControlsGetFeatureValueWithResponseMockTests { - @Test - public void testGetFeatureValueWithResponse() throws Exception { - String responseStr = "{\"featureName\":\"xlxvopcuw\",\"value\":\"sytt\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ExposureControlResponse response = manager.exposureControls() - .getFeatureValueWithResponse("jwtfohcylvjzuf", - new ExposureControlRequest().withFeatureName("aedzy").withFeatureType("rufwdbimjudp"), - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java deleted file mode 100644 index 1bc353aa818d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest; -import com.azure.resourcemanager.datafactory.models.ExposureControlBatchResponse; -import com.azure.resourcemanager.datafactory.models.ExposureControlRequest; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ExposureControlsQueryFeatureValuesByFactoryWithResponseMockTests { - @Test - public void testQueryFeatureValuesByFactoryWithResponse() throws Exception { - String responseStr = "{\"exposureControlResponses\":[{\"featureName\":\"bdozdkrmplj\",\"value\":\"zvlu\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, 
responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ExposureControlBatchResponse response = manager.exposureControls() - .queryFeatureValuesByFactoryWithResponse("uby", "rhencgfzhbtzu", - new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList( - new ExposureControlRequest().withFeatureName("tchxtbcqjvyz").withFeatureType("xkhyvjomq"), - new ExposureControlRequest().withFeatureName("oatzmrwlsr").withFeatureType("ajlrglmjruf"), - new ExposureControlRequest().withFeatureName("pnm").withFeatureType("e"), - new ExposureControlRequest().withFeatureName("pytfdzkbkyt").withFeatureType("twwk"))), - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java deleted file mode 100644 index 373bcdeb6847..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Expression; -import org.junit.jupiter.api.Assertions; - -public final class ExpressionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Expression model = BinaryData.fromString("{\"value\":\"ytxtdgu\"}").toObject(Expression.class); - Assertions.assertEquals("ytxtdgu", model.value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Expression model = new Expression().withValue("ytxtdgu"); - model = BinaryData.fromObject(model).toObject(Expression.class); - Assertions.assertEquals("ytxtdgu", model.value()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java deleted file mode 100644 index 62cb7ae42d61..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ExpressionV2Tests.java +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExpressionV2; -import com.azure.resourcemanager.datafactory.models.ExpressionV2Type; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ExpressionV2Tests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ExpressionV2 model = BinaryData.fromString( - "{\"type\":\"Field\",\"value\":\"vyosmxov\",\"operators\":[\"bahxcwjqtfsxcak\",\"ezdvne\"],\"operands\":[{\"type\":\"Unary\",\"value\":\"ejwqeypaoa\",\"operators\":[\"whiyus\",\"hmjlkknwsja\",\"mrnrhsv\",\"jnlerm\"],\"operands\":[{\"type\":\"Field\",\"value\":\"cqynvfekjvclbkk\",\"operators\":[\"rbd\",\"s\",\"ftqah\",\"vpmwnmuzcjnkaw\"],\"operands\":[{},{},{}]}]}]}") - .toObject(ExpressionV2.class); - Assertions.assertEquals(ExpressionV2Type.FIELD, model.type()); - Assertions.assertEquals("vyosmxov", model.value()); - Assertions.assertEquals("bahxcwjqtfsxcak", model.operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.operands().get(0).type()); - Assertions.assertEquals("ejwqeypaoa", model.operands().get(0).value()); - Assertions.assertEquals("whiyus", model.operands().get(0).operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.FIELD, model.operands().get(0).operands().get(0).type()); - Assertions.assertEquals("cqynvfekjvclbkk", model.operands().get(0).operands().get(0).value()); - Assertions.assertEquals("rbd", model.operands().get(0).operands().get(0).operators().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ExpressionV2 model = new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("vyosmxov") - .withOperators(Arrays.asList("bahxcwjqtfsxcak", "ezdvne")) - .withOperands(Arrays.asList(new ExpressionV2().withType(ExpressionV2Type.UNARY) - .withValue("ejwqeypaoa") - 
.withOperators(Arrays.asList("whiyus", "hmjlkknwsja", "mrnrhsv", "jnlerm")) - .withOperands(Arrays.asList(new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("cqynvfekjvclbkk") - .withOperators(Arrays.asList("rbd", "s", "ftqah", "vpmwnmuzcjnkaw")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())))))); - model = BinaryData.fromObject(model).toObject(ExpressionV2.class); - Assertions.assertEquals(ExpressionV2Type.FIELD, model.type()); - Assertions.assertEquals("vyosmxov", model.value()); - Assertions.assertEquals("bahxcwjqtfsxcak", model.operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.UNARY, model.operands().get(0).type()); - Assertions.assertEquals("ejwqeypaoa", model.operands().get(0).value()); - Assertions.assertEquals("whiyus", model.operands().get(0).operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.FIELD, model.operands().get(0).operands().get(0).type()); - Assertions.assertEquals("cqynvfekjvclbkk", model.operands().get(0).operands().get(0).value()); - Assertions.assertEquals("rbd", model.operands().get(0).operands().get(0).operators().get(0)); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java deleted file mode 100644 index f5cf07825bc6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoriesDeleteByResourceGroupWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class FactoriesDeleteByResourceGroupWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.factories() - .deleteByResourceGroupWithResponse("lrumvixbyedcavv", "pjnnhaf", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java deleted file mode 100644 index f08b14dc1952..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryIdentityTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FactoryIdentity; -import com.azure.resourcemanager.datafactory.models.FactoryIdentityType; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FactoryIdentityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryIdentity model = BinaryData.fromString( - "{\"type\":\"SystemAssigned\",\"principalId\":\"e7ce8104-99be-4f7c-a3d9-61b241524eed\",\"tenantId\":\"e6580028-d4aa-4bb5-8921-8d2c6828ca9e\",\"userAssignedIdentities\":{\"qjpkcattpngjcrc\":\"dataleyyvx\"}}") - .toObject(FactoryIdentity.class); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FactoryIdentity model = new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED) - .withUserAssignedIdentities(mapOf("qjpkcattpngjcrc", "dataleyyvx")); - model = BinaryData.fromObject(model).toObject(FactoryIdentity.class); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java deleted file mode 100644 index f2c5e67cc70b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoConfigurationTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FactoryRepoConfiguration; -import org.junit.jupiter.api.Assertions; - -public final class FactoryRepoConfigurationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryRepoConfiguration model = BinaryData.fromString( - "{\"type\":\"FactoryRepoConfiguration\",\"accountName\":\"o\",\"repositoryName\":\"rq\",\"collaborationBranch\":\"b\",\"rootFolder\":\"oczvy\",\"lastCommitId\":\"qrvkdv\",\"disablePublish\":false}") - .toObject(FactoryRepoConfiguration.class); - Assertions.assertEquals("o", model.accountName()); - Assertions.assertEquals("rq", model.repositoryName()); - Assertions.assertEquals("b", model.collaborationBranch()); - Assertions.assertEquals("oczvy", model.rootFolder()); - Assertions.assertEquals("qrvkdv", model.lastCommitId()); - Assertions.assertEquals(false, model.disablePublish()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception 
{ - FactoryRepoConfiguration model = new FactoryRepoConfiguration().withAccountName("o") - .withRepositoryName("rq") - .withCollaborationBranch("b") - .withRootFolder("oczvy") - .withLastCommitId("qrvkdv") - .withDisablePublish(false); - model = BinaryData.fromObject(model).toObject(FactoryRepoConfiguration.class); - Assertions.assertEquals("o", model.accountName()); - Assertions.assertEquals("rq", model.repositoryName()); - Assertions.assertEquals("b", model.collaborationBranch()); - Assertions.assertEquals("oczvy", model.rootFolder()); - Assertions.assertEquals("qrvkdv", model.lastCommitId()); - Assertions.assertEquals(false, model.disablePublish()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java deleted file mode 100644 index 97fe44a9e3db..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryRepoUpdateTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FactoryRepoConfiguration; -import com.azure.resourcemanager.datafactory.models.FactoryRepoUpdate; -import org.junit.jupiter.api.Assertions; - -public final class FactoryRepoUpdateTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryRepoUpdate model = BinaryData.fromString( - "{\"factoryResourceId\":\"qzbqjvsov\",\"repoConfiguration\":{\"type\":\"FactoryRepoConfiguration\",\"accountName\":\"okacspk\",\"repositoryName\":\"lhzdobp\",\"collaborationBranch\":\"jmflbvvnch\",\"rootFolder\":\"kcciwwzjuqkhr\",\"lastCommitId\":\"jiwkuofoskghsau\",\"disablePublish\":true}}") - .toObject(FactoryRepoUpdate.class); - Assertions.assertEquals("qzbqjvsov", model.factoryResourceId()); - Assertions.assertEquals("okacspk", model.repoConfiguration().accountName()); - Assertions.assertEquals("lhzdobp", model.repoConfiguration().repositoryName()); - Assertions.assertEquals("jmflbvvnch", model.repoConfiguration().collaborationBranch()); - Assertions.assertEquals("kcciwwzjuqkhr", model.repoConfiguration().rootFolder()); - Assertions.assertEquals("jiwkuofoskghsau", model.repoConfiguration().lastCommitId()); - Assertions.assertEquals(true, model.repoConfiguration().disablePublish()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FactoryRepoUpdate model = new FactoryRepoUpdate().withFactoryResourceId("qzbqjvsov") - .withRepoConfiguration(new FactoryRepoConfiguration().withAccountName("okacspk") - .withRepositoryName("lhzdobp") - .withCollaborationBranch("jmflbvvnch") - .withRootFolder("kcciwwzjuqkhr") - .withLastCommitId("jiwkuofoskghsau") - .withDisablePublish(true)); - model = BinaryData.fromObject(model).toObject(FactoryRepoUpdate.class); - Assertions.assertEquals("qzbqjvsov", model.factoryResourceId()); - Assertions.assertEquals("okacspk", 
model.repoConfiguration().accountName()); - Assertions.assertEquals("lhzdobp", model.repoConfiguration().repositoryName()); - Assertions.assertEquals("jmflbvvnch", model.repoConfiguration().collaborationBranch()); - Assertions.assertEquals("kcciwwzjuqkhr", model.repoConfiguration().rootFolder()); - Assertions.assertEquals("jiwkuofoskghsau", model.repoConfiguration().lastCommitId()); - Assertions.assertEquals(true, model.repoConfiguration().disablePublish()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java deleted file mode 100644 index 1700339ce8be..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdateParametersTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FactoryIdentity; -import com.azure.resourcemanager.datafactory.models.FactoryIdentityType; -import com.azure.resourcemanager.datafactory.models.FactoryUpdateParameters; -import com.azure.resourcemanager.datafactory.models.PublicNetworkAccess; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FactoryUpdateParametersTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryUpdateParameters model = BinaryData.fromString( - "{\"tags\":{\"bldngkpoc\":\"kouknvudwtiu\",\"npiucgygevqznty\":\"pazyxoegukg\"},\"identity\":{\"type\":\"SystemAssigned\",\"principalId\":\"3a021020-4b0b-49ed-8cdb-62c22f113895\",\"tenantId\":\"11daabc0-cfda-46b4-a794-f99a9c2225fd\",\"userAssignedIdentities\":{\"r\":\"datac\",\"dpydn\":\"dataj\",\"sjttgzfbish\":\"datayhxdeoejzicwi\",\"jdeyeamdpha\":\"databkh\"}},\"properties\":{\"publicNetworkAccess\":\"Disabled\"}}") - .toObject(FactoryUpdateParameters.class); - Assertions.assertEquals("kouknvudwtiu", model.tags().get("bldngkpoc")); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.identity().type()); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FactoryUpdateParameters model = new FactoryUpdateParameters() - .withTags(mapOf("bldngkpoc", "kouknvudwtiu", "npiucgygevqznty", "pazyxoegukg")) - .withIdentity(new FactoryIdentity().withType(FactoryIdentityType.SYSTEM_ASSIGNED) - .withUserAssignedIdentities(mapOf("r", "datac", "dpydn", "dataj", "sjttgzfbish", "datayhxdeoejzicwi", - "jdeyeamdpha", "databkh"))) - .withPublicNetworkAccess(PublicNetworkAccess.DISABLED); - model = BinaryData.fromObject(model).toObject(FactoryUpdateParameters.class); - 
Assertions.assertEquals("kouknvudwtiu", model.tags().get("bldngkpoc")); - Assertions.assertEquals(FactoryIdentityType.SYSTEM_ASSIGNED, model.identity().type()); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java deleted file mode 100644 index 3f0fc96403a5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryUpdatePropertiesTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.FactoryUpdateProperties; -import com.azure.resourcemanager.datafactory.models.PublicNetworkAccess; -import org.junit.jupiter.api.Assertions; - -public final class FactoryUpdatePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryUpdateProperties model - = BinaryData.fromString("{\"publicNetworkAccess\":\"Disabled\"}").toObject(FactoryUpdateProperties.class); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FactoryUpdateProperties model - = new FactoryUpdateProperties().withPublicNetworkAccess(PublicNetworkAccess.DISABLED); - model = BinaryData.fromObject(model).toObject(FactoryUpdateProperties.class); - Assertions.assertEquals(PublicNetworkAccess.DISABLED, model.publicNetworkAccess()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java deleted file mode 100644 index 7597801e1aca..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FactoryVstsConfigurationTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FactoryVstsConfiguration; -import org.junit.jupiter.api.Assertions; - -public final class FactoryVstsConfigurationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FactoryVstsConfiguration model = BinaryData.fromString( - "{\"type\":\"FactoryVSTSConfiguration\",\"projectName\":\"qv\",\"tenantId\":\"makli\",\"accountName\":\"qcahyhxal\",\"repositoryName\":\"bx\",\"collaborationBranch\":\"woijpodtbl\",\"rootFolder\":\"pkkwj\",\"lastCommitId\":\"odqhyk\",\"disablePublish\":false}") - .toObject(FactoryVstsConfiguration.class); - Assertions.assertEquals("qcahyhxal", model.accountName()); - Assertions.assertEquals("bx", model.repositoryName()); - Assertions.assertEquals("woijpodtbl", model.collaborationBranch()); - Assertions.assertEquals("pkkwj", model.rootFolder()); - Assertions.assertEquals("odqhyk", model.lastCommitId()); - Assertions.assertEquals(false, model.disablePublish()); - Assertions.assertEquals("qv", model.projectName()); - Assertions.assertEquals("makli", model.tenantId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FactoryVstsConfiguration model = new FactoryVstsConfiguration().withAccountName("qcahyhxal") - .withRepositoryName("bx") - .withCollaborationBranch("woijpodtbl") - .withRootFolder("pkkwj") - .withLastCommitId("odqhyk") - .withDisablePublish(false) - .withProjectName("qv") - .withTenantId("makli"); - model = BinaryData.fromObject(model).toObject(FactoryVstsConfiguration.class); - Assertions.assertEquals("qcahyhxal", model.accountName()); - Assertions.assertEquals("bx", model.repositoryName()); - Assertions.assertEquals("woijpodtbl", model.collaborationBranch()); - Assertions.assertEquals("pkkwj", model.rootFolder()); - Assertions.assertEquals("odqhyk", model.lastCommitId()); - Assertions.assertEquals(false, 
model.disablePublish()); - Assertions.assertEquals("qv", model.projectName()); - Assertions.assertEquals("makli", model.tenantId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java deleted file mode 100644 index 29c7da352227..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerLocationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FileServerLocation; - -public final class FileServerLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileServerLocation model = BinaryData.fromString( - "{\"type\":\"FileServerLocation\",\"folderPath\":\"databbcngkeg\",\"fileName\":\"datay\",\"\":{\"zox\":\"databfetwily\"}}") - .toObject(FileServerLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileServerLocation model = new FileServerLocation().withFolderPath("databbcngkeg").withFileName("datay"); - model = BinaryData.fromObject(model).toObject(FileServerLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java deleted file mode 100644 index a649f3e79617..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerReadSettingsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FileServerReadSettings; - -public final class FileServerReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileServerReadSettings model = BinaryData.fromString( - "{\"type\":\"FileServerReadSettings\",\"recursive\":\"datazznkrarjiijpbych\",\"wildcardFolderPath\":\"datahlhdz\",\"wildcardFileName\":\"datarlfcmeyxypx\",\"fileListPath\":\"datacrtm\",\"enablePartitionDiscovery\":\"datajz\",\"partitionRootPath\":\"dataetpdezebvtkgzjna\",\"deleteFilesAfterCompletion\":\"datahheilnhkcxuou\",\"modifiedDatetimeStart\":\"datazckogcvazoz\",\"modifiedDatetimeEnd\":\"datanyofts\",\"fileFilter\":\"dataiydwklkuamx\",\"maxConcurrentConnections\":\"datajtnqeo\",\"disableMetricsCollection\":\"dataiz\",\"\":{\"mkztwzlw\":\"datav\",\"pfe\":\"datallhm\",\"tgdvwatfvjvum\":\"datarfuzedud\",\"qvxnoazarivcze\":\"datambcflhycluounv\"}}") - .toObject(FileServerReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileServerReadSettings model = new FileServerReadSettings().withMaxConcurrentConnections("datajtnqeo") - .withDisableMetricsCollection("dataiz") - .withRecursive("datazznkrarjiijpbych") - .withWildcardFolderPath("datahlhdz") - .withWildcardFileName("datarlfcmeyxypx") - .withFileListPath("datacrtm") - .withEnablePartitionDiscovery("datajz") - .withPartitionRootPath("dataetpdezebvtkgzjna") - .withDeleteFilesAfterCompletion("datahheilnhkcxuou") - .withModifiedDatetimeStart("datazckogcvazoz") - 
.withModifiedDatetimeEnd("datanyofts") - .withFileFilter("dataiydwklkuamx"); - model = BinaryData.fromObject(model).toObject(FileServerReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java deleted file mode 100644 index df95f13aece0..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileServerWriteSettingsTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FileServerWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class FileServerWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileServerWriteSettings model = BinaryData.fromString( - "{\"type\":\"FileServerWriteSettings\",\"maxConcurrentConnections\":\"datactjneibhztlzb\",\"disableMetricsCollection\":\"datayvjispkgkhvpvbz\",\"copyBehavior\":\"dataovhxstxs\",\"metadata\":[{\"name\":\"datavsd\",\"value\":\"datashhcdlsowyhxwh\"},{\"name\":\"datafgtwxmj\",\"value\":\"datazfi\"},{\"name\":\"datamwmce\",\"value\":\"datachrfm\"},{\"name\":\"datajxxwzdwmjurtno\",\"value\":\"dataiohdxyuk\"}],\"\":{\"izefajgble\":\"datafwykrpojenpsff\",\"cevfbiekydk\":\"dataxpeuahvxfn\",\"yrvdszrizpej\":\"datadkzfkneck\",\"mhzcicitykzy\":\"datayrzzxqtcgs\"}}") - .toObject(FileServerWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - FileServerWriteSettings model = new FileServerWriteSettings().withMaxConcurrentConnections("datactjneibhztlzb") - .withDisableMetricsCollection("datayvjispkgkhvpvbz") - .withCopyBehavior("dataovhxstxs") - .withMetadata(Arrays.asList(new MetadataItem().withName("datavsd").withValue("datashhcdlsowyhxwh"), - new MetadataItem().withName("datafgtwxmj").withValue("datazfi"), - new MetadataItem().withName("datamwmce").withValue("datachrfm"), - new MetadataItem().withName("datajxxwzdwmjurtno").withValue("dataiohdxyuk"))); - model = BinaryData.fromObject(model).toObject(FileServerWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java deleted file mode 100644 index e1c6ff758a90..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTests.java +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.azure.resourcemanager.datafactory.models.FileShareDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FileShareDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileShareDataset model = BinaryData.fromString( - "{\"type\":\"FileShare\",\"typeProperties\":{\"folderPath\":\"datahhlwtpk\",\"fileName\":\"datageattbzkg\",\"modifiedDatetimeStart\":\"dataqnwqstt\",\"modifiedDatetimeEnd\":\"datauvcysjeufjxf\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"dataitfnonp\",\"deserializer\":\"datauxlvrhprrvbwonl\",\"\":{\"huy\":\"datalvtlrvbst\",\"u\":\"datauninttlnrjdszd\",\"vgp\":\"dataiciqppo\",\"uhwfwj\":\"datae\"}},\"fileFilter\":\"dataxuoxtfnres\",\"compression\":{\"type\":\"dataepgckn\",\"level\":\"datamgvsnvbtqdxfmj\",\"\":{\"jluqllbsupu\":\"datan\"}}},\"description\":\"x\",\"structure\":\"datadlhzwhcu\",\"schema\":\"databosjjfd\",\"linkedServiceName\":{\"referenceName\":\"zhrjqfyaytvsly\",\"parameters\":{\"cjiwgsxfaio\":\"datagniuar\",\"ujjgnfgrzxbarc\":\"datawd\",\"jwenjcytesmf\":\"datapaefzqsy\"}},\"parameters\":{\"fzhhezvhjudxdyyr\":{\"type\":\"Bool\",\"defaultValue\":\"dataodqhuauzmzivrt\"}},\"annotations\":[\"dataahswtvdkxb\",\"datassgfenffdxbvwfqj\",\"datahivdrija\",\"dataxndmuvar\"],\"folder\":{\"name\":\"zj\"},\"\":{\"muhcuhtuzl\":\"datar\"}}") - 
.toObject(FileShareDataset.class); - Assertions.assertEquals("x", model.description()); - Assertions.assertEquals("zhrjqfyaytvsly", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("fzhhezvhjudxdyyr").type()); - Assertions.assertEquals("zj", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileShareDataset model = new FileShareDataset().withDescription("x") - .withStructure("datadlhzwhcu") - .withSchema("databosjjfd") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zhrjqfyaytvsly") - .withParameters( - mapOf("cjiwgsxfaio", "datagniuar", "ujjgnfgrzxbarc", "datawd", "jwenjcytesmf", "datapaefzqsy"))) - .withParameters(mapOf("fzhhezvhjudxdyyr", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataodqhuauzmzivrt"))) - .withAnnotations(Arrays.asList("dataahswtvdkxb", "datassgfenffdxbvwfqj", "datahivdrija", "dataxndmuvar")) - .withFolder(new DatasetFolder().withName("zj")) - .withFolderPath("datahhlwtpk") - .withFileName("datageattbzkg") - .withModifiedDatetimeStart("dataqnwqstt") - .withModifiedDatetimeEnd("datauvcysjeufjxf") - .withFormat(new DatasetStorageFormat().withSerializer("dataitfnonp") - .withDeserializer("datauxlvrhprrvbwonl") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withFileFilter("dataxuoxtfnres") - .withCompression(new DatasetCompression().withType("dataepgckn") - .withLevel("datamgvsnvbtqdxfmj") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(FileShareDataset.class); - Assertions.assertEquals("x", model.description()); - Assertions.assertEquals("zhrjqfyaytvsly", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("fzhhezvhjudxdyyr").type()); - Assertions.assertEquals("zj", model.folder().name()); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java deleted file mode 100644 index 5872a693dcd9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileShareDatasetTypePropertiesTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.FileShareDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class FileShareDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileShareDatasetTypeProperties model = BinaryData.fromString( - 
"{\"folderPath\":\"datawyopgarpfctwrapc\",\"fileName\":\"dataojqyvzes\",\"modifiedDatetimeStart\":\"dataiysnjqyowaadc\",\"modifiedDatetimeEnd\":\"dataazab\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datatsewkaupwhlz\",\"deserializer\":\"datakremgjl\",\"\":{\"kqakofaj\":\"datadorsirxxhy\",\"shixbcejopylbl\":\"datareprfvmkinwteyr\"}},\"fileFilter\":\"dataprrw\",\"compression\":{\"type\":\"datapimtc\",\"level\":\"datafx\",\"\":{\"liqemcdiiisklbo\":\"datatzfslxizhqikmgo\",\"rk\":\"dataxxupjxv\"}}}") - .toObject(FileShareDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileShareDatasetTypeProperties model = new FileShareDatasetTypeProperties() - .withFolderPath("datawyopgarpfctwrapc") - .withFileName("dataojqyvzes") - .withModifiedDatetimeStart("dataiysnjqyowaadc") - .withModifiedDatetimeEnd("dataazab") - .withFormat(new DatasetStorageFormat().withSerializer("datatsewkaupwhlz") - .withDeserializer("datakremgjl") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withFileFilter("dataprrw") - .withCompression( - new DatasetCompression().withType("datapimtc").withLevel("datafx").withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(FileShareDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java deleted file mode 100644 index 926614eb9ba5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FileSystemSink; - -public final class FileSystemSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileSystemSink model = BinaryData.fromString( - "{\"type\":\"FileSystemSink\",\"copyBehavior\":\"databadkzpqctuplpkje\",\"writeBatchSize\":\"datanrnzl\",\"writeBatchTimeout\":\"dataugoeftrbxoma\",\"sinkRetryCount\":\"datavarfqverxelquqze\",\"sinkRetryWait\":\"datavjmllzykalbaum\",\"maxConcurrentConnections\":\"datadwqiucpj\",\"disableMetricsCollection\":\"datatbss\",\"\":{\"t\":\"datawzelxrf\"}}") - .toObject(FileSystemSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileSystemSink model = new FileSystemSink().withWriteBatchSize("datanrnzl") - .withWriteBatchTimeout("dataugoeftrbxoma") - .withSinkRetryCount("datavarfqverxelquqze") - .withSinkRetryWait("datavjmllzykalbaum") - .withMaxConcurrentConnections("datadwqiucpj") - 
.withDisableMetricsCollection("datatbss") - .withCopyBehavior("databadkzpqctuplpkje"); - model = BinaryData.fromObject(model).toObject(FileSystemSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java deleted file mode 100644 index cb79426a12a6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FileSystemSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FileSystemSource; - -public final class FileSystemSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FileSystemSource model = BinaryData.fromString( - "{\"type\":\"FileSystemSource\",\"recursive\":\"datanxzvjnmpvsblu\",\"additionalColumns\":\"databhzukrpfbhihddi\",\"sourceRetryCount\":\"dataexyqyfkud\",\"sourceRetryWait\":\"dataqpwardpw\",\"maxConcurrentConnections\":\"datalvfisk\",\"disableMetricsCollection\":\"datasp\",\"\":{\"cs\":\"dataxnyock\",\"w\":\"datausdrgzmmr\"}}") - .toObject(FileSystemSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FileSystemSource model = new FileSystemSource().withSourceRetryCount("dataexyqyfkud") - .withSourceRetryWait("dataqpwardpw") - .withMaxConcurrentConnections("datalvfisk") - .withDisableMetricsCollection("datasp") - .withRecursive("datanxzvjnmpvsblu") - .withAdditionalColumns("databhzukrpfbhihddi"); - model = 
BinaryData.fromObject(model).toObject(FileSystemSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java deleted file mode 100644 index 61652e3ea976..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTests.java +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.FilterActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FilterActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FilterActivity model = BinaryData.fromString( - 
"{\"type\":\"Filter\",\"typeProperties\":{\"items\":{\"value\":\"esw\"},\"condition\":{\"value\":\"llqyvblfprskxhg\"}},\"name\":\"vgviycjulun\",\"description\":\"uficipibnjpiv\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"abmahjlahdpli\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Skipped\"],\"\":{\"bmjheyntsdwxpa\":\"dataa\"}},{\"activity\":\"ubgrjkgkoxuedml\",\"dependencyConditions\":[\"Failed\"],\"\":{\"jywpkbvvjyenw\":\"datay\",\"hwlkfljooiiviwlf\":\"datagvhhouhlt\"}}],\"userProperties\":[{\"name\":\"ycyvxbrthwbit\",\"value\":\"datawwkofoqrvnhcu\"}],\"\":{\"pdhrqjjlsat\":\"datavkzmgvtempyfjahw\",\"q\":\"dataxsgarxtgexmxgq\",\"totubzasrwoxumn\":\"dataulyrtkvftl\"}}") - .toObject(FilterActivity.class); - Assertions.assertEquals("vgviycjulun", model.name()); - Assertions.assertEquals("uficipibnjpiv", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("abmahjlahdpli", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ycyvxbrthwbit", model.userProperties().get(0).name()); - Assertions.assertEquals("esw", model.items().value()); - Assertions.assertEquals("llqyvblfprskxhg", model.condition().value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FilterActivity model = new FilterActivity().withName("vgviycjulun") - .withDescription("uficipibnjpiv") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("abmahjlahdpli") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new 
ActivityDependency().withActivity("ubgrjkgkoxuedml") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ycyvxbrthwbit").withValue("datawwkofoqrvnhcu"))) - .withItems(new Expression().withValue("esw")) - .withCondition(new Expression().withValue("llqyvblfprskxhg")); - model = BinaryData.fromObject(model).toObject(FilterActivity.class); - Assertions.assertEquals("vgviycjulun", model.name()); - Assertions.assertEquals("uficipibnjpiv", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("abmahjlahdpli", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ycyvxbrthwbit", model.userProperties().get(0).name()); - Assertions.assertEquals("esw", model.items().value()); - Assertions.assertEquals("llqyvblfprskxhg", model.condition().value()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java deleted file mode 100644 index 30b6e7c1cbdf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FilterActivityTypePropertiesTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.FilterActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.Expression; -import org.junit.jupiter.api.Assertions; - -public final class FilterActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FilterActivityTypeProperties model - = BinaryData.fromString("{\"items\":{\"value\":\"cqewxcwry\"},\"condition\":{\"value\":\"wmvcxyuem\"}}") - .toObject(FilterActivityTypeProperties.class); - Assertions.assertEquals("cqewxcwry", model.items().value()); - Assertions.assertEquals("wmvcxyuem", model.condition().value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FilterActivityTypeProperties model - = new FilterActivityTypeProperties().withItems(new Expression().withValue("cqewxcwry")) - .withCondition(new Expression().withValue("wmvcxyuem")); - model = 
BinaryData.fromObject(model).toObject(FilterActivityTypeProperties.class); - Assertions.assertEquals("cqewxcwry", model.items().value()); - Assertions.assertEquals("wmvcxyuem", model.condition().value()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java deleted file mode 100644 index 5d1fe5a16d44..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTests.java +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSink; -import com.azure.resourcemanager.datafactory.models.DataFlowSource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.Flowlet; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.Transformation; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FlowletTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Flowlet model = BinaryData.fromString( - 
"{\"type\":\"Flowlet\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"tqi\",\"parameters\":{\"xezppk\":\"datazvsgeafgfosehxlz\",\"zeqtoyrplixlajml\":\"datawaaeskyfjl\",\"evhamfowg\":\"datapq\"}},\"name\":\"btmkekxpkzwaq\",\"description\":\"fqovc\",\"dataset\":{\"referenceName\":\"qbplvfiduszte\",\"parameters\":{\"msfe\":\"datayj\",\"ihpq\":\"datax\",\"jfowxwy\":\"datadagrhrdicxdw\"}},\"linkedService\":{\"referenceName\":\"c\",\"parameters\":{\"uhoduchvlscrdpib\":\"datagbipcukdveks\",\"szekbh\":\"datadyjdussp\",\"hbfrnuybfflj\":\"datalkaaggkr\",\"dusr\":\"dataiimreoagsqta\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"f\",\"datasetParameters\":\"datasqwudohzilfmnli\",\"parameters\":{\"s\":\"datai\",\"hyqgsdrmmttjx\":\"dataeypofqpm\",\"wzm\":\"dataphgerhsmvgoh\",\"imsqywwwmhk\":\"datailrixysf\"},\"\":{\"zduewihapfjii\":\"dataaedrympmlqoin\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"iqfl\",\"parameters\":{\"edfsbwc\":\"datahpclb\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"vbvzipbwxgo\",\"parameters\":{\"admskx\":\"datazp\"}},\"name\":\"npd\",\"description\":\"igjsugswhgs\",\"dataset\":{\"referenceName\":\"dkwwn\",\"parameters\":{\"quvwsxbgn\":\"dataoctohzh\"}},\"linkedService\":{\"referenceName\":\"ervqchoadhrsxqvz\",\"parameters\":{\"fajglzrsubklrxhj\":\"dataabdsr\",\"eqq\":\"dataltcetjdvqyd\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ruwdxvqzxoebwgj\",\"datasetParameters\":\"dataibanb\",\"parameters\":{\"xbzrpejplssanb\":\"datawtzvpakloz\",\"nrswgkpjhboyik\":\"datattkgsux\",\"lgw\":\"databhuhk\"},\"\":{\"qzufgsyfejyvdwt\":\"datahueoijyzcqyp\"}}}],\"transformations\":[{\"name\":\"tpq\",\"description\":\"amkncfgy\",\"dataset\":{\"referenceName\":\"xsnxocuullojk\",\"parameters\":{\"wdjuxdbdljzgdy\":\"datahg\",\"sgzlrqhb\":\"datacvuq\"}},\"linkedService\":{\"referenceName\":\"qogdx\",\"parameters\":{\"minxojjluxxdh\":\"datafpyxxtjlflec\",\"qjm\":\"datalzzdz\"}},\"fl
owlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ay\",\"datasetParameters\":\"dataribqlotokh\",\"parameters\":{\"whnkbtl\":\"datataznkcqwwxwjyof\",\"tsnld\":\"dataljssm\",\"eogfgfiijrykwl\":\"datapwolgisubxb\"},\"\":{\"qceazfpxgnmq\":\"datas\"}}},{\"name\":\"zvluyqqaiosss\",\"description\":\"vaifppuacvfy\",\"dataset\":{\"referenceName\":\"w\",\"parameters\":{\"jdhsoy\":\"datax\",\"pboujs\":\"datahpvtyqftteh\",\"suenyg\":\"datakfvvdshxcde\"}},\"linkedService\":{\"referenceName\":\"cgjtfrnquktrfn\",\"parameters\":{\"s\":\"datalr\",\"wntfmtbgwjdxwna\":\"dataylt\",\"etw\":\"datakurrdreyzjwh\",\"kykcyqhyqqzz\":\"datajwzzqseuzu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"keys\",\"datasetParameters\":\"datawfopazdazg\",\"parameters\":{\"fut\":\"datagpewq\",\"kn\":\"datadpvozglqj\",\"tzeyowmndcovd\":\"datazcl\",\"mk\":\"datazqauxzan\"},\"\":{\"txudqyeme\":\"datauwkudrbcp\"}}},{\"name\":\"unaucm\",\"description\":\"rtneemmja\",\"dataset\":{\"referenceName\":\"cgxefnohaitraniz\",\"parameters\":{\"gfcoc\":\"datagudasmxubvfb\",\"lfmpztr\":\"datafhpri\",\"vhl\":\"dataud\"}},\"linkedService\":{\"referenceName\":\"culregpqt\",\"parameters\":{\"shqrdgrt\":\"datahvrztnvg\",\"fa\":\"datamewjzlpyk\",\"zrransyb\":\"datazwjcaye\",\"nkfscjfn\":\"datalpolwzrghsrle\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vuagfqwtl\",\"datasetParameters\":\"datagvmreuptrklzmi\",\"parameters\":{\"xfsv\":\"datawo\",\"nwlslrcigtzjcvbx\":\"dataghmp\",\"yxpavidnie\":\"datalapsnsso\",\"slpuxgcbdsva\":\"datawffcvvye\"},\"\":{\"vnjobfelhldiuhzz\":\"dataptwtrkxgpazwugxy\"}}}],\"script\":\"lmfaewzgiudjp\",\"scriptLines\":[\"httqh\",\"mhk\",\"ezsdsuxheq\"]},\"description\":\"cruxspinym\",\"annotations\":[\"datawokmik\",\"dataaz\",\"databmjxuvjipf\",\"datavhax\"],\"folder\":{\"name\":\"zaehpphthd\"}}") - .toObject(Flowlet.class); - Assertions.assertEquals("cruxspinym", model.description()); - Assertions.assertEquals("zaehpphthd", model.folder().name()); - 
Assertions.assertEquals("btmkekxpkzwaq", model.sources().get(0).name()); - Assertions.assertEquals("fqovc", model.sources().get(0).description()); - Assertions.assertEquals("qbplvfiduszte", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("c", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("f", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("tqi", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("npd", model.sinks().get(0).name()); - Assertions.assertEquals("igjsugswhgs", model.sinks().get(0).description()); - Assertions.assertEquals("dkwwn", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("ervqchoadhrsxqvz", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("ruwdxvqzxoebwgj", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("iqfl", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("vbvzipbwxgo", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("tpq", model.transformations().get(0).name()); - Assertions.assertEquals("amkncfgy", model.transformations().get(0).description()); - Assertions.assertEquals("xsnxocuullojk", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("qogdx", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("ay", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("lmfaewzgiudjp", model.script()); - 
Assertions.assertEquals("httqh", model.scriptLines().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Flowlet model = new Flowlet().withDescription("cruxspinym") - .withAnnotations(Arrays.asList("datawokmik", "dataaz", "databmjxuvjipf", "datavhax")) - .withFolder(new DataFlowFolder().withName("zaehpphthd")) - .withSources(Arrays.asList(new DataFlowSource().withName("btmkekxpkzwaq") - .withDescription("fqovc") - .withDataset(new DatasetReference().withReferenceName("qbplvfiduszte") - .withParameters(mapOf("msfe", "datayj", "ihpq", "datax", "jfowxwy", "datadagrhrdicxdw"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("c") - .withParameters(mapOf("uhoduchvlscrdpib", "datagbipcukdveks", "szekbh", "datadyjdussp", - "hbfrnuybfflj", "datalkaaggkr", "dusr", "dataiimreoagsqta"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("f") - .withDatasetParameters("datasqwudohzilfmnli") - .withParameters(mapOf("s", "datai", "hyqgsdrmmttjx", "dataeypofqpm", "wzm", "dataphgerhsmvgoh", - "imsqywwwmhk", "datailrixysf")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("tqi") - .withParameters(mapOf("xezppk", "datazvsgeafgfosehxlz", "zeqtoyrplixlajml", "datawaaeskyfjl", - "evhamfowg", "datapq"))))) - .withSinks(Arrays.asList(new DataFlowSink().withName("npd") - .withDescription("igjsugswhgs") - .withDataset( - new DatasetReference().withReferenceName("dkwwn").withParameters(mapOf("quvwsxbgn", "dataoctohzh"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("ervqchoadhrsxqvz") - .withParameters(mapOf("fajglzrsubklrxhj", "dataabdsr", "eqq", "dataltcetjdvqyd"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ruwdxvqzxoebwgj") - .withDatasetParameters("dataibanb") - .withParameters( - mapOf("xbzrpejplssanb", 
"datawtzvpakloz", "nrswgkpjhboyik", "datattkgsux", "lgw", "databhuhk")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("iqfl") - .withParameters(mapOf("edfsbwc", "datahpclb"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("vbvzipbwxgo") - .withParameters(mapOf("admskx", "datazp"))))) - .withTransformations(Arrays.asList( - new Transformation().withName("tpq") - .withDescription("amkncfgy") - .withDataset(new DatasetReference().withReferenceName("xsnxocuullojk") - .withParameters(mapOf("wdjuxdbdljzgdy", "datahg", "sgzlrqhb", "datacvuq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("qogdx") - .withParameters(mapOf("minxojjluxxdh", "datafpyxxtjlflec", "qjm", "datalzzdz"))) - .withFlowlet( - new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ay") - .withDatasetParameters("dataribqlotokh") - .withParameters(mapOf("whnkbtl", "datataznkcqwwxwjyof", "tsnld", "dataljssm", - "eogfgfiijrykwl", "datapwolgisubxb")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("zvluyqqaiosss") - .withDescription("vaifppuacvfy") - .withDataset(new DatasetReference().withReferenceName("w") - .withParameters( - mapOf("jdhsoy", "datax", "pboujs", "datahpvtyqftteh", "suenyg", "datakfvvdshxcde"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("cgjtfrnquktrfn") - .withParameters(mapOf("s", "datalr", "wntfmtbgwjdxwna", "dataylt", "etw", "datakurrdreyzjwh", - "kykcyqhyqqzz", "datajwzzqseuzu"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("keys") - .withDatasetParameters("datawfopazdazg") - .withParameters(mapOf("fut", "datagpewq", "kn", "datadpvozglqj", "tzeyowmndcovd", "datazcl", - "mk", "datazqauxzan")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("unaucm") - .withDescription("rtneemmja") - 
.withDataset(new DatasetReference().withReferenceName("cgxefnohaitraniz") - .withParameters(mapOf("gfcoc", "datagudasmxubvfb", "lfmpztr", "datafhpri", "vhl", "dataud"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("culregpqt") - .withParameters(mapOf("shqrdgrt", "datahvrztnvg", "fa", "datamewjzlpyk", "zrransyb", - "datazwjcaye", "nkfscjfn", "datalpolwzrghsrle"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vuagfqwtl") - .withDatasetParameters("datagvmreuptrklzmi") - .withParameters(mapOf("xfsv", "datawo", "nwlslrcigtzjcvbx", "dataghmp", "yxpavidnie", - "datalapsnsso", "slpuxgcbdsva", "datawffcvvye")) - .withAdditionalProperties(mapOf())))) - .withScript("lmfaewzgiudjp") - .withScriptLines(Arrays.asList("httqh", "mhk", "ezsdsuxheq")); - model = BinaryData.fromObject(model).toObject(Flowlet.class); - Assertions.assertEquals("cruxspinym", model.description()); - Assertions.assertEquals("zaehpphthd", model.folder().name()); - Assertions.assertEquals("btmkekxpkzwaq", model.sources().get(0).name()); - Assertions.assertEquals("fqovc", model.sources().get(0).description()); - Assertions.assertEquals("qbplvfiduszte", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("c", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("f", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("tqi", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("npd", model.sinks().get(0).name()); - Assertions.assertEquals("igjsugswhgs", model.sinks().get(0).description()); - Assertions.assertEquals("dkwwn", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("ervqchoadhrsxqvz", model.sinks().get(0).linkedService().referenceName()); - 
Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("ruwdxvqzxoebwgj", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("iqfl", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("vbvzipbwxgo", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("tpq", model.transformations().get(0).name()); - Assertions.assertEquals("amkncfgy", model.transformations().get(0).description()); - Assertions.assertEquals("xsnxocuullojk", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("qogdx", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("ay", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("lmfaewzgiudjp", model.script()); - Assertions.assertEquals("httqh", model.scriptLines().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java deleted file mode 100644 index 8d78facf8896..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FlowletTypePropertiesTests.java +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.FlowletTypeProperties; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSink; -import com.azure.resourcemanager.datafactory.models.DataFlowSource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.Transformation; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class FlowletTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FlowletTypeProperties model = BinaryData.fromString( - 
"{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"etatlakf\",\"parameters\":{\"bvtooxrpogwp\":\"dataxwgiks\",\"bfntumeezbxvq\":\"datachgjtnhtukfacih\",\"sgomtmjz\":\"databnuvwcg\",\"mkkhtgfredml\":\"dataxuqgovsxpwwztj\"}},\"name\":\"cgrllc\",\"description\":\"aovjow\",\"dataset\":{\"referenceName\":\"hpa\",\"parameters\":{\"lyotg\":\"datao\",\"genmvceb\":\"datawsxnsrqor\",\"dcqjkedwqurc\":\"dataeetqujxcxxq\"}},\"linkedService\":{\"referenceName\":\"jmrvvxwjongz\",\"parameters\":{\"chskxxka\":\"dataqqrsil\",\"aqgvto\":\"datasbvr\",\"jrthcfjzhx\":\"datarulfuct\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bqjr\",\"datasetParameters\":\"datatvrjeqmtz\",\"parameters\":{\"rdhabsr\":\"dataqrztrxal\",\"a\":\"datarsnrhp\",\"xkvvcs\":\"dataiwkkvya\",\"mlivrjjxnwx\":\"datamsvuvdjkqxetq\"},\"\":{\"jxlehzlx\":\"datap\",\"w\":\"datagfquwz\"}}},{\"schemaLinkedService\":{\"referenceName\":\"belwcer\",\"parameters\":{\"xniu\":\"databpjxljtxbusqtb\"}},\"name\":\"sdzhgbdgzpagsec\",\"description\":\"db\",\"dataset\":{\"referenceName\":\"qrgxf\",\"parameters\":{\"ellnkkii\":\"dataqiynez\",\"jfuaxroqvqpilrgu\":\"datavmtumxpym\"}},\"linkedService\":{\"referenceName\":\"anlduwzorxsb\",\"parameters\":{\"xym\":\"dataqk\",\"qepdx\":\"datakqv\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"uubwyvpjb\",\"datasetParameters\":\"datacpj\",\"parameters\":{\"exkydfb\":\"dataqgi\",\"vhuerkjddvrglieg\":\"datalj\"},\"\":{\"fgmwd\":\"datavbiiftksdwgdnk\",\"buvczldbglzoutb\":\"datac\",\"orbjg\":\"dataaqgzekajclyzgs\"}}},{\"schemaLinkedService\":{\"referenceName\":\"zjotvmrxkhlo\",\"parameters\":{\"yu\":\"datajbhvhdiqayflu\"}},\"name\":\"snuudtelvhyibdr\",\"description\":\"swhb\",\"dataset\":{\"referenceName\":\"bpyrowtjoxzt\",\"parameters\":{\"cta\":\"dataqchvczevjn\"}},\"linkedService\":{\"referenceName\":\"yvrtp\",\"parameters\":{\"yhwqw\":\"datamhzcgkrepdqh\",\"km\":\"datamvxqab\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xi
n\",\"datasetParameters\":\"datare\",\"parameters\":{\"angp\":\"datawhlpuzjpceezn\",\"phmsexroq\":\"databfaxyxzlbc\",\"nfee\":\"datandktxfv\"},\"\":{\"bgnixxoww\":\"datakrie\",\"p\":\"datakyfwnwpiwxeiicr\",\"dm\":\"datapk\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"jvskwsdgkjg\",\"parameters\":{\"fcvoinwoqar\":\"datawrasekw\",\"atdavuqmcbyms\":\"datawyxqiclad\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"bjlquv\",\"parameters\":{\"simi\":\"datacjumv\",\"l\":\"datayoi\",\"ibb\":\"datamiqwnnrac\",\"twukex\":\"dataqpspkladydgnha\"}},\"name\":\"gpmnmabe\",\"description\":\"qilwgdfpfqfpcvs\",\"dataset\":{\"referenceName\":\"l\",\"parameters\":{\"jj\":\"datavwerfwxbsmtb\",\"ci\":\"datah\",\"ekqhs\":\"datakwdvbtb\",\"ejuwyqwdqigmghgi\":\"datahtfpwpqb\"}},\"linkedService\":{\"referenceName\":\"txlujkhnjcmr\",\"parameters\":{\"qtwmlmhjnqtqeahj\":\"datamkh\",\"vgua\":\"datadvragpokddxejhh\",\"k\":\"datatpt\",\"gbvoffbkk\":\"datawkqyns\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xaexqokmyrljial\",\"datasetParameters\":\"datanobrqlpb\",\"parameters\":{\"hsqe\":\"datarpzuyudivbx\",\"y\":\"dataeonqelwgdhuru\",\"ogatmoljiy\":\"dataza\",\"knsjulugd\":\"datampinmzvfkneerzzt\"},\"\":{\"xlelfjh\":\"datah\",\"dmiwjekpty\":\"dataeizcpih\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ydbjzcqymlcf\",\"parameters\":{\"pstauol\":\"datamhsurlgwqkpmm\",\"hvvlrl\":\"datawiubmomsgvv\",\"swaeqkzfz\":\"datahewjjmajnkdflqio\",\"xrmexznlw\":\"dataxjoshohtotryegpk\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"fokxk\",\"parameters\":{\"ufgjblcdr\":\"dataze\"}},\"name\":\"yfcemftz\",\"description\":\"ykyalugek\",\"dataset\":{\"referenceName\":\"qnhttwdowrczfj\",\"parameters\":{\"cifrhjulrsulwzpf\":\"datauxxrkkmhmnulwemp\",\"hhgzotf\":\"datausnaw\"}},\"linkedService\":{\"referenceName\":\"yrgkoekv\",\"parameters\":{\"pcqydeykvsk\":\"dataxyxhighctxbxmo\",\"frjeizikgqaboohx\":\"datacz\",\"rmdvewuyqaeohpj\":\"datamsgycqs\",\"aurghooxa\":\"da
tagejkbvhh\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"k\",\"datasetParameters\":\"datajxjoezlq\",\"parameters\":{\"tmbozomtzamicbig\":\"datadknkobe\"},\"\":{\"raabmdlqjb\":\"datagzseznuxkeua\",\"ixlhupm\":\"datadp\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ihzbdnpxpk\",\"parameters\":{\"rufssjyg\":\"datareyxelyicghf\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"fxrk\",\"parameters\":{\"efgvqcp\":\"datammgmqf\",\"djhunhghcgawnr\":\"datawjgquxweyslandkd\",\"bfsx\":\"datanquoxsotireimse\"}},\"name\":\"tcyilbvz\",\"description\":\"xcjzlquze\",\"dataset\":{\"referenceName\":\"kjxebj\",\"parameters\":{\"v\":\"datainzabwmvoglj\",\"ehaqidoyzltgio\":\"datapgidnw\",\"sergdtpe\":\"dataqoqpepiaeap\",\"qq\":\"datanacyheqwb\"}},\"linkedService\":{\"referenceName\":\"jubkhjozfymcwmb\",\"parameters\":{\"qiipsejb\":\"datavqyvl\",\"ieswhddzydisn\":\"datavsi\",\"i\":\"dataepywyjlnldpxottd\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qibzji\",\"datasetParameters\":\"dataee\",\"parameters\":{\"jhdhz\":\"datahryvcjwqwoqsra\"},\"\":{\"gv\":\"datapijhfrzgdkk\"}}},{\"schemaLinkedService\":{\"referenceName\":\"khsusmmorf\",\"parameters\":{\"neyttl\":\"datawilzzhnijmriprlk\",\"bkut\":\"datacxiv\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"mltwjfluxynbpvzl\",\"parameters\":{\"khmocgjs\":\"dataauyqnj\"}},\"name\":\"gouarhwvixqq\",\"description\":\"ljky\",\"dataset\":{\"referenceName\":\"jrclrvtzq\",\"parameters\":{\"y\":\"datactbhpjhxpcvrd\",\"n\":\"datait\"}},\"linkedService\":{\"referenceName\":\"ad\",\"parameters\":{\"iceevsaaxwspca\":\"datajahwriuomzczf\",\"gzzromv\":\"dataikhfjqebglcxk\",\"sem\":\"datag\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"srfsvpinkzpatqt\",\"datasetParameters\":\"dataswxspvckojaz\",\"parameters\":{\"vdylytcovqseusrf\":\"dataspftesubzpv\",\"swkjmdihdcyyyz\":\"databdxzfxnxmlbmu\"},\"\":{\"m\":\"databwzjnufzrf\"}}}],\"transformations\":[{\"name\":\"nn\",\"description\":\
"rtftedz\",\"dataset\":{\"referenceName\":\"bjtvgjsxmtyjj\",\"parameters\":{\"obt\":\"datadpw\",\"cauwazcgwdfriwg\":\"dataphtitsffofwanmhk\",\"byfg\":\"databjpozokscvgllixd\",\"ohutxlcsk\":\"dataewqkjvxprwpxs\"}},\"linkedService\":{\"referenceName\":\"e\",\"parameters\":{\"lfb\":\"dataggg\",\"srtmdylperpiltt\":\"datardcgu\",\"resrgvtshuvft\":\"datazgczfcmfpfbode\",\"muqkevzgjypanhx\":\"dataai\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"xxzetwwzjwotnx\",\"datasetParameters\":\"datafhglhrfo\",\"parameters\":{\"cselqxovppqibuk\":\"dataecrsnh\"},\"\":{\"zivfqbqnasdsy\":\"datazrlrmlccmetjs\",\"vdgxly\":\"datanzsieuscplh\",\"ezsvkolrupjov\":\"datakxitds\"}}},{\"name\":\"ozsaye\",\"description\":\"azwzlpzbtzuykyki\",\"dataset\":{\"referenceName\":\"sdyepfno\",\"parameters\":{\"wyqejgaao\":\"dataezacfpztga\",\"ngdyfcixrhlcq\":\"datactgkppgkqzkcyzmf\",\"goiutgw\":\"datahoe\"}},\"linkedService\":{\"referenceName\":\"kahpqhazynta\",\"parameters\":{\"i\":\"datancogmipnmliqmvlb\",\"sdfjyiesoc\":\"dataeaqgrvgpomxpupd\",\"mpyzgleo\":\"dataiqbuou\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"bgbwwz\",\"datasetParameters\":\"dataajfwnncfm\",\"parameters\":{\"bouguxtndopgjt\":\"dataqgjjrlhiqlwixv\",\"alapdlndbe\":\"databas\",\"ixv\":\"dataqb\"},\"\":{\"zfe\":\"dataynpbbfqvzfjmspu\"}}}],\"script\":\"jljmphfkyezol\",\"scriptLines\":[\"mi\",\"uydoccnx\",\"hanzbuiad\",\"batecaat\"]}") - .toObject(FlowletTypeProperties.class); - Assertions.assertEquals("cgrllc", model.sources().get(0).name()); - Assertions.assertEquals("aovjow", model.sources().get(0).description()); - Assertions.assertEquals("hpa", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("jmrvvxwjongz", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("bqjr", model.sources().get(0).flowlet().referenceName()); - 
Assertions.assertEquals("etatlakf", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("gpmnmabe", model.sinks().get(0).name()); - Assertions.assertEquals("qilwgdfpfqfpcvs", model.sinks().get(0).description()); - Assertions.assertEquals("l", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("txlujkhnjcmr", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("xaexqokmyrljial", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("jvskwsdgkjg", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("bjlquv", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("nn", model.transformations().get(0).name()); - Assertions.assertEquals("rtftedz", model.transformations().get(0).description()); - Assertions.assertEquals("bjtvgjsxmtyjj", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("e", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("xxzetwwzjwotnx", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("jljmphfkyezol", model.script()); - Assertions.assertEquals("mi", model.scriptLines().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FlowletTypeProperties model - = new FlowletTypeProperties() - .withSources( - Arrays - .asList( - new DataFlowSource().withName("cgrllc") - .withDescription("aovjow") - .withDataset(new DatasetReference().withReferenceName("hpa") - .withParameters(mapOf("lyotg", "datao", "genmvceb", "datawsxnsrqor", "dcqjkedwqurc", - "dataeetqujxcxxq"))) - .withLinkedService(new 
LinkedServiceReference().withReferenceName("jmrvvxwjongz") - .withParameters(mapOf("chskxxka", "dataqqrsil", "aqgvto", "datasbvr", "jrthcfjzhx", - "datarulfuct"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("bqjr") - .withDatasetParameters("datatvrjeqmtz") - .withParameters(mapOf("rdhabsr", "dataqrztrxal", "a", "datarsnrhp", "xkvvcs", - "dataiwkkvya", "mlivrjjxnwx", "datamsvuvdjkqxetq")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("etatlakf") - .withParameters(mapOf( - "bvtooxrpogwp", "dataxwgiks", "bfntumeezbxvq", "datachgjtnhtukfacih", - "sgomtmjz", "databnuvwcg", "mkkhtgfredml", "dataxuqgovsxpwwztj"))), - new DataFlowSource().withName("sdzhgbdgzpagsec") - .withDescription("db") - .withDataset(new DatasetReference().withReferenceName("qrgxf") - .withParameters( - mapOf("ellnkkii", "dataqiynez", "jfuaxroqvqpilrgu", "datavmtumxpym"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("anlduwzorxsb") - .withParameters(mapOf("xym", "dataqk", "qepdx", "datakqv"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("uubwyvpjb") - .withDatasetParameters("datacpj") - .withParameters(mapOf("exkydfb", "dataqgi", "vhuerkjddvrglieg", "datalj")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference() - .withReferenceName("belwcer") - .withParameters(mapOf("xniu", "databpjxljtxbusqtb"))), - new DataFlowSource().withName("snuudtelvhyibdr") - .withDescription("swhb") - .withDataset(new DatasetReference().withReferenceName("bpyrowtjoxzt") - .withParameters(mapOf("cta", "dataqchvczevjn"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yvrtp") - .withParameters(mapOf("yhwqw", "datamhzcgkrepdqh", "km", "datamvxqab"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - 
.withReferenceName("xin") - .withDatasetParameters("datare") - .withParameters(mapOf("angp", "datawhlpuzjpceezn", "phmsexroq", "databfaxyxzlbc", - "nfee", "datandktxfv")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("zjotvmrxkhlo") - .withParameters(mapOf("yu", "datajbhvhdiqayflu"))))) - .withSinks(Arrays.asList( - new DataFlowSink().withName("gpmnmabe") - .withDescription("qilwgdfpfqfpcvs") - .withDataset(new DatasetReference().withReferenceName("l") - .withParameters(mapOf("jj", "datavwerfwxbsmtb", "ci", "datah", "ekqhs", "datakwdvbtb", - "ejuwyqwdqigmghgi", "datahtfpwpqb"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("txlujkhnjcmr") - .withParameters(mapOf("qtwmlmhjnqtqeahj", "datamkh", "vgua", "datadvragpokddxejhh", "k", - "datatpt", "gbvoffbkk", "datawkqyns"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("xaexqokmyrljial") - .withDatasetParameters("datanobrqlpb") - .withParameters(mapOf("hsqe", "datarpzuyudivbx", "y", "dataeonqelwgdhuru", "ogatmoljiy", - "dataza", "knsjulugd", "datampinmzvfkneerzzt")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("jvskwsdgkjg") - .withParameters(mapOf("fcvoinwoqar", "datawrasekw", "atdavuqmcbyms", "datawyxqiclad"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("bjlquv") - .withParameters(mapOf("simi", "datacjumv", "l", "datayoi", "ibb", "datamiqwnnrac", "twukex", - "dataqpspkladydgnha"))), - new DataFlowSink().withName("yfcemftz") - .withDescription("ykyalugek") - .withDataset(new DatasetReference().withReferenceName("qnhttwdowrczfj") - .withParameters(mapOf("cifrhjulrsulwzpf", "datauxxrkkmhmnulwemp", "hhgzotf", "datausnaw"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yrgkoekv") - .withParameters(mapOf("pcqydeykvsk", "dataxyxhighctxbxmo", 
"frjeizikgqaboohx", "datacz", - "rmdvewuyqaeohpj", "datamsgycqs", "aurghooxa", "datagejkbvhh"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("k") - .withDatasetParameters("datajxjoezlq") - .withParameters(mapOf("tmbozomtzamicbig", "datadknkobe")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ydbjzcqymlcf") - .withParameters(mapOf("pstauol", "datamhsurlgwqkpmm", "hvvlrl", "datawiubmomsgvv", - "swaeqkzfz", "datahewjjmajnkdflqio", "xrmexznlw", "dataxjoshohtotryegpk"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("fokxk") - .withParameters(mapOf("ufgjblcdr", "dataze"))), - new DataFlowSink().withName("tcyilbvz") - .withDescription("xcjzlquze") - .withDataset(new DatasetReference().withReferenceName("kjxebj") - .withParameters(mapOf("v", "datainzabwmvoglj", "ehaqidoyzltgio", "datapgidnw", "sergdtpe", - "dataqoqpepiaeap", "qq", "datanacyheqwb"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("jubkhjozfymcwmb") - .withParameters(mapOf("qiipsejb", "datavqyvl", "ieswhddzydisn", "datavsi", "i", - "dataepywyjlnldpxottd"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("qibzji") - .withDatasetParameters("dataee") - .withParameters(mapOf("jhdhz", "datahryvcjwqwoqsra")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ihzbdnpxpk") - .withParameters(mapOf("rufssjyg", "datareyxelyicghf"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("fxrk") - .withParameters(mapOf("efgvqcp", "datammgmqf", "djhunhghcgawnr", "datawjgquxweyslandkd", - "bfsx", "datanquoxsotireimse"))), - new DataFlowSink().withName("gouarhwvixqq") - .withDescription("ljky") - .withDataset(new DatasetReference().withReferenceName("jrclrvtzq") - 
.withParameters(mapOf("y", "datactbhpjhxpcvrd", "n", "datait"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("ad") - .withParameters(mapOf("iceevsaaxwspca", "datajahwriuomzczf", "gzzromv", "dataikhfjqebglcxk", - "sem", "datag"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("srfsvpinkzpatqt") - .withDatasetParameters("dataswxspvckojaz") - .withParameters( - mapOf("vdylytcovqseusrf", "dataspftesubzpv", "swkjmdihdcyyyz", "databdxzfxnxmlbmu")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("khsusmmorf") - .withParameters(mapOf("neyttl", "datawilzzhnijmriprlk", "bkut", "datacxiv"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("mltwjfluxynbpvzl") - .withParameters(mapOf("khmocgjs", "dataauyqnj"))))) - .withTransformations(Arrays.asList( - new Transformation().withName("nn") - .withDescription("rtftedz") - .withDataset(new DatasetReference().withReferenceName("bjtvgjsxmtyjj") - .withParameters(mapOf("obt", "datadpw", "cauwazcgwdfriwg", "dataphtitsffofwanmhk", "byfg", - "databjpozokscvgllixd", "ohutxlcsk", "dataewqkjvxprwpxs"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("e") - .withParameters(mapOf("lfb", "dataggg", "srtmdylperpiltt", "datardcgu", "resrgvtshuvft", - "datazgczfcmfpfbode", "muqkevzgjypanhx", "dataai"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("xxzetwwzjwotnx") - .withDatasetParameters("datafhglhrfo") - .withParameters(mapOf("cselqxovppqibuk", "dataecrsnh")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("ozsaye") - .withDescription("azwzlpzbtzuykyki") - .withDataset(new DatasetReference().withReferenceName("sdyepfno") - .withParameters(mapOf("wyqejgaao", "dataezacfpztga", "ngdyfcixrhlcq", - "datactgkppgkqzkcyzmf", "goiutgw", "datahoe"))) - 
.withLinkedService(new LinkedServiceReference().withReferenceName("kahpqhazynta") - .withParameters(mapOf("i", "datancogmipnmliqmvlb", "sdfjyiesoc", "dataeaqgrvgpomxpupd", - "mpyzgleo", "dataiqbuou"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("bgbwwz") - .withDatasetParameters("dataajfwnncfm") - .withParameters( - mapOf("bouguxtndopgjt", "dataqgjjrlhiqlwixv", "alapdlndbe", "databas", "ixv", "dataqb")) - .withAdditionalProperties(mapOf())))) - .withScript("jljmphfkyezol") - .withScriptLines(Arrays.asList("mi", "uydoccnx", "hanzbuiad", "batecaat")); - model = BinaryData.fromObject(model).toObject(FlowletTypeProperties.class); - Assertions.assertEquals("cgrllc", model.sources().get(0).name()); - Assertions.assertEquals("aovjow", model.sources().get(0).description()); - Assertions.assertEquals("hpa", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("jmrvvxwjongz", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("bqjr", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("etatlakf", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("gpmnmabe", model.sinks().get(0).name()); - Assertions.assertEquals("qilwgdfpfqfpcvs", model.sinks().get(0).description()); - Assertions.assertEquals("l", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("txlujkhnjcmr", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("xaexqokmyrljial", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("jvskwsdgkjg", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("bjlquv", 
model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("nn", model.transformations().get(0).name()); - Assertions.assertEquals("rtftedz", model.transformations().get(0).description()); - Assertions.assertEquals("bjtvgjsxmtyjj", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("e", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("xxzetwwzjwotnx", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("jljmphfkyezol", model.script()); - Assertions.assertEquals("mi", model.scriptLines().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java deleted file mode 100644 index 1e8fd7e39b0d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTests.java +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.ForEachActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ForEachActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ForEachActivity model = BinaryData.fromString( - "{\"type\":\"ForEach\",\"typeProperties\":{\"isSequential\":false,\"batchCount\":1331977169,\"items\":{\"value\":\"rxhucxmybuqjpgb\"},\"activities\":[{\"type\":\"Activity\",\"name\":\"axga\",\"description\":\"fyinh\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xcuamdydkdcvow\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Failed\",\"Succeeded\"],\"\":{\"vhfdezomykjbl\":\"datayvopotiefxhaqq\",\"vmymfaiw\":\"dataypuon\",\"xsqcvabyzdaroe\":\"datalrphadd\"}}],\"userProperties\":[{\"name\":\"pau\",\"value\":\"datalety\"},{\"name\":\"zziavguskvvnzn\",\"value\":\"datahboqeue\"}],\"\":{\"zda\":\"dataldjkkvac\",\"fuqqb\":\"datajnsbdw\"}},{\"type\":\"Activity\",\"name\":\"frb\",\"description\":\"spzkvoknmeredn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"cnhdecx\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Skipped\"],\"\":{\"ntpksbtige\":\"dataxfdojxbyxfx\",\"asvahbqoojd\":\"datawaidqzf\",\"danlhql\":\"datamrxjlumrzfdbotnk
\",\"ozvcxxezur\":\"datambgiaoxpf\"}}],\"userProperties\":[{\"name\":\"cnsspbleazvyftk\",\"value\":\"databbribgc\"},{\"name\":\"kkmrlptdkwib\",\"value\":\"datarivedshuxlhecz\"}],\"\":{\"bviw\":\"datawm\",\"lponkrhpyediu\":\"datarjeysj\",\"pcjrbfayduzzyxly\":\"dataskcadkyoouv\",\"anec\":\"datawtwzufm\"}},{\"type\":\"Activity\",\"name\":\"fpbf\",\"description\":\"ghohoxcontsrv\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"vcl\",\"dependencyConditions\":[\"Completed\"],\"\":{\"zs\":\"datamfn\"}},{\"activity\":\"hkkktlodsyyzmf\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"ictej\":\"datat\",\"gtcd\":\"datazbasxapc\",\"vqysghk\":\"datafetpkttjnneynm\",\"fiddnktutwcz\":\"dataxgxqdmvfdocjaf\"}},{\"activity\":\"wmtfjzuqhyqvm\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"igtsrrlelpobm\":\"datadpeedzowverhtyc\",\"qgluhr\":\"datamdsisll\",\"fqb\":\"dataimojozhdcptxxb\"}}],\"userProperties\":[{\"name\":\"wnyudcvqeowepv\",\"value\":\"datarngiffsnt\"},{\"name\":\"pfqguovqqrcyeumw\",\"value\":\"datazagurgur\"},{\"name\":\"cguwyuzhkefownc\",\"value\":\"datadcrwoiqsrqebjgo\"}],\"\":{\"qfkmseaomqqb\":\"datacahdagchk\"}}]},\"name\":\"lwxcfjvedxyeb\",\"description\":\"wnmnxppgfep\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"tae\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"fbmrppjfceab\":\"dataynvv\",\"ewi\":\"datapwzs\",\"vdjmvzcycg\":\"datan\"}},{\"activity\":\"telimqxwih\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Skipped\",\"Completed\"],\"\":{\"zrmrv\":\"datauziglri\",\"lotwnppstpq\":\"datacbfcis\",\"eawolhlfffe\":\"datas\",\"qtvxhipchdpd\":\"datanbmhqylrsywisc\"}},{\"activity\":\"vcmpoqkcikfes\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"pypzgdet\":\"datapsx\",\"gyhu\":\"datad\"}}],\"userProperties\":[{\"name\":\"tspocrskkraapczm\",\"value\":\"dataiiftjigtqyzocf\"},{\"name\":\"yw\",\"value\":\"da
taflciooxybmktb\"}],\"\":{\"wtjokuhrtqnb\":\"datajcepyc\",\"ojvejxhfeo\":\"datagcnicknsbbccbqx\",\"idlpmlxhzwyy\":\"datazftfyjcen\"}}") - .toObject(ForEachActivity.class); - Assertions.assertEquals("lwxcfjvedxyeb", model.name()); - Assertions.assertEquals("wnmnxppgfep", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("tae", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("tspocrskkraapczm", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(1331977169, model.batchCount()); - Assertions.assertEquals("rxhucxmybuqjpgb", model.items().value()); - Assertions.assertEquals("axga", model.activities().get(0).name()); - Assertions.assertEquals("fyinh", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("xcuamdydkdcvow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("pau", model.activities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ForEachActivity model = new ForEachActivity().withName("lwxcfjvedxyeb") - .withDescription("wnmnxppgfep") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("tae") - .withDependencyConditions( - 
Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("telimqxwih") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vcmpoqkcikfes") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("tspocrskkraapczm").withValue("dataiiftjigtqyzocf"), - new UserProperty().withName("yw").withValue("dataflciooxybmktb"))) - .withIsSequential(false) - .withBatchCount(1331977169) - .withItems(new Expression().withValue("rxhucxmybuqjpgb")) - .withActivities(Arrays.asList( - new Activity().withName("axga") - .withDescription("fyinh") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("xcuamdydkdcvow") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("pau").withValue("datalety"), - new UserProperty().withName("zziavguskvvnzn").withValue("datahboqeue"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("frb") - .withDescription("spzkvoknmeredn") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("cnhdecx") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED, 
DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("cnsspbleazvyftk").withValue("databbribgc"), - new UserProperty().withName("kkmrlptdkwib").withValue("datarivedshuxlhecz"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("fpbf") - .withDescription("ghohoxcontsrv") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("vcl") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hkkktlodsyyzmf") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wmtfjzuqhyqvm") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("wnyudcvqeowepv").withValue("datarngiffsnt"), - new UserProperty().withName("pfqguovqqrcyeumw").withValue("datazagurgur"), - new UserProperty().withName("cguwyuzhkefownc").withValue("datadcrwoiqsrqebjgo"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(ForEachActivity.class); - Assertions.assertEquals("lwxcfjvedxyeb", model.name()); - Assertions.assertEquals("wnmnxppgfep", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("tae", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - 
Assertions.assertEquals("tspocrskkraapczm", model.userProperties().get(0).name()); - Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(1331977169, model.batchCount()); - Assertions.assertEquals("rxhucxmybuqjpgb", model.items().value()); - Assertions.assertEquals("axga", model.activities().get(0).name()); - Assertions.assertEquals("fyinh", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("xcuamdydkdcvow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("pau", model.activities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java deleted file mode 100644 index 90dd10c36bd9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ForEachActivityTypePropertiesTests.java +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ForEachActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ForEachActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ForEachActivityTypeProperties model = BinaryData.fromString( - 
"{\"isSequential\":false,\"batchCount\":201071910,\"items\":{\"value\":\"mlhgytkthevodd\"},\"activities\":[{\"type\":\"Activity\",\"name\":\"egw\",\"description\":\"xdfdjftcrj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"txtc\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"xkphaqtnejufljqz\":\"datatwlpuur\",\"cxyxepllb\":\"dataixlzaavvuvhyerj\"}},{\"activity\":\"eepf\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Succeeded\",\"Failed\"],\"\":{\"bqpgncscwsefd\":\"datawfqjweigyw\",\"l\":\"datansuao\"}}],\"userProperties\":[{\"name\":\"yjdcvnanejmc\",\"value\":\"datallec\"}],\"\":{\"ofxkelwvcyprpog\":\"datafgyhkv\",\"ochpzcgs\":\"dataqvuftkiyghcmpyki\",\"t\":\"datapklfnst\",\"tkzesfdrsgf\":\"datapww\"}},{\"type\":\"Activity\",\"name\":\"dshdwivep\",\"description\":\"trfunmkcj\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"dukrjjfnexuqalwj\",\"dependencyConditions\":[\"Completed\",\"Skipped\"],\"\":{\"jdcehlgr\":\"datazemdffy\",\"ubfotgi\":\"datav\",\"x\":\"dataporioki\",\"dtn\":\"datafftt\"}},{\"activity\":\"lggtr\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Succeeded\",\"Skipped\"],\"\":{\"sguladdujzenagm\":\"dataqcinjejyinl\",\"gibmngbkqcprbw\":\"datahmgtbqzftm\",\"mgyjvjyxueuq\":\"datandlold\",\"gxak\":\"databgbs\"}},{\"activity\":\"kbryolzbmdntajgg\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"s\":\"datamsbhdixzao\",\"fuwtlu\":\"datanxgk\"}}],\"userProperties\":[{\"name\":\"wwfeixmueu\",\"value\":\"datapivsltlyqcrpwndc\"},{\"name\":\"rdqcmsrzrcddl\",\"value\":\"datagaoptwqf\"}],\"\":{\"dpmez\":\"datacondaoptr\"}},{\"type\":\"Activity\",\"name\":\"loflcilrafkrvv\",\"description\":\"knymqzmuinuvtgjg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"uoslz\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"cvcnevkfkmenawsv\":\"datavwcjrbjgdvwa\"}},{\"activity\":
\"imq\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"comqyqvywzhb\":\"dataxgsuuapktfvemwf\"}},{\"activity\":\"kl\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\"],\"\":{\"snfeybkhyqouzzp\":\"datapsqkv\",\"zsnvtlhnmydshg\":\"datanl\",\"zyqnfsjnrfpzlv\":\"datadvwshc\",\"vvrk\":\"dataeojnskekhmo\"}}],\"userProperties\":[{\"name\":\"fazsiizcwha\",\"value\":\"datajhaetyeafj\"},{\"name\":\"ismacacdyajy\",\"value\":\"datawvqlrzobvkgfp\"},{\"name\":\"bjavnkyqrjbzrz\",\"value\":\"datahthukuypyeof\"},{\"name\":\"aeabbxkldtw\",\"value\":\"dataryc\"}],\"\":{\"ilcdbudfwl\":\"dataldgbgua\"}},{\"type\":\"Activity\",\"name\":\"gp\",\"description\":\"rfkslgpl\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"hgpzqibqilcntmu\",\"dependencyConditions\":[\"Completed\"],\"\":{\"aawentkokndjwpx\":\"dataarj\",\"wnnvtlbclg\":\"dataanjqfwxicb\"}},{\"activity\":\"kfwofweayowzp\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\"],\"\":{\"asxuhiwymmii\":\"datamuzpdjthpsy\",\"p\":\"dataffjgjmysn\"}},{\"activity\":\"rzqagmcivsqawia\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"zakqgabrb\":\"dataeuitkfvdjg\",\"wuy\":\"datauxg\",\"hrxjiw\":\"dataulozdoi\"}},{\"activity\":\"iv\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\"],\"\":{\"ciokbvft\":\"datacwnbqlaubazyri\",\"nsllfkcroviim\":\"dataah\",\"w\":\"datadlmag\"}}],\"userProperties\":[{\"name\":\"xtendfpdoxt\",\"value\":\"datafosxxkktkloj\"},{\"name\":\"gsbyst\",\"value\":\"datanwjgsvlleflir\"}],\"\":{\"tufmujadippdntun\":\"dataptvkjdowuzasd\",\"xmaacrqrovbozj\":\"datapeeprmeb\"}}]}") - .toObject(ForEachActivityTypeProperties.class); - Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(201071910, model.batchCount()); - Assertions.assertEquals("mlhgytkthevodd", model.items().value()); - Assertions.assertEquals("egw", model.activities().get(0).name()); - Assertions.assertEquals("xdfdjftcrj", 
model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("txtc", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("yjdcvnanejmc", model.activities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ForEachActivityTypeProperties model - = new ForEachActivityTypeProperties().withIsSequential(false) - .withBatchCount(201071910) - .withItems(new Expression().withValue("mlhgytkthevodd")) - .withActivities( - Arrays - .asList( - new Activity().withName("egw") - .withDescription("xdfdjftcrj") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("txtc") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("eepf") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("yjdcvnanejmc").withValue("datallec"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("dshdwivep") - .withDescription("trfunmkcj") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays - .asList( - new ActivityDependency().withActivity("dukrjjfnexuqalwj") - 
.withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lggtr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kbryolzbmdntajgg") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("wwfeixmueu").withValue("datapivsltlyqcrpwndc"), - new UserProperty().withName("rdqcmsrzrcddl").withValue("datagaoptwqf"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("loflcilrafkrvv") - .withDescription("knymqzmuinuvtgjg") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("uoslz") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("imq") - .withDependencyConditions(Arrays - .asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kl") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("fazsiizcwha").withValue("datajhaetyeafj"), - new UserProperty().withName("ismacacdyajy").withValue("datawvqlrzobvkgfp"), - new UserProperty().withName("bjavnkyqrjbzrz").withValue("datahthukuypyeof"), - new 
UserProperty().withName("aeabbxkldtw").withValue("dataryc"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("gp") - .withDescription("rfkslgpl") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("hgpzqibqilcntmu") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kfwofweayowzp") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rzqagmcivsqawia") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("iv") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("xtendfpdoxt").withValue("datafosxxkktkloj"), - new UserProperty().withName("gsbyst").withValue("datanwjgsvlleflir"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(ForEachActivityTypeProperties.class); - Assertions.assertEquals(false, model.isSequential()); - Assertions.assertEquals(201071910, model.batchCount()); - Assertions.assertEquals("mlhgytkthevodd", model.items().value()); - Assertions.assertEquals("egw", model.activities().get(0).name()); - Assertions.assertEquals("xdfdjftcrj", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); - 
Assertions.assertEquals("txtc", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("yjdcvnanejmc", model.activities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java deleted file mode 100644 index d5784f44cac5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatReadSettingsTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FormatReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class FormatReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FormatReadSettings model - = BinaryData.fromString("{\"type\":\"FormatReadSettings\",\"\":{\"xqtgzvzcfmwfogjr\":\"datadkqffhux\"}}") - .toObject(FormatReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FormatReadSettings model - = new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings")); - model = BinaryData.fromObject(model).toObject(FormatReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java deleted file mode 100644 index 21cb1a5eb388..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FormatWriteSettingsTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FormatWriteSettings; -import java.util.HashMap; -import java.util.Map; - -public final class FormatWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FormatWriteSettings model = BinaryData - .fromString("{\"type\":\"FormatWriteSettings\",\"\":{\"jsvymozryyyvlxm\":\"datacxyrsleghozsm\"}}") - .toObject(FormatWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FormatWriteSettings model - = new FormatWriteSettings().withAdditionalProperties(mapOf("type", "FormatWriteSettings")); - model = BinaryData.fromObject(model).toObject(FormatWriteSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java deleted file mode 100644 index 322458af6e63..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpReadSettingsTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FtpReadSettings; - -public final class FtpReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FtpReadSettings model = BinaryData.fromString( - "{\"type\":\"FtpReadSettings\",\"recursive\":\"datajabzs\",\"wildcardFolderPath\":\"datauza\",\"wildcardFileName\":\"datakuthooa\",\"enablePartitionDiscovery\":\"datamszcwwvbxvibiv\",\"partitionRootPath\":\"dataryppmmtqxqiqae\",\"deleteFilesAfterCompletion\":\"datagqfcoraj\",\"fileListPath\":\"dataglkffgyfe\",\"useBinaryTransfer\":\"dataseceuvyn\",\"disableChunking\":\"databzcufxfyzqrs\",\"maxConcurrentConnections\":\"datamlsud\",\"disableMetricsCollection\":\"dataiycepjkllyeds\",\"\":{\"mwl\":\"databvirnxey\",\"hizwqz\":\"datapyidecff\",\"zsw\":\"datasqwjypiosz\"}}") - .toObject(FtpReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FtpReadSettings model = new FtpReadSettings().withMaxConcurrentConnections("datamlsud") - .withDisableMetricsCollection("dataiycepjkllyeds") - .withRecursive("datajabzs") - .withWildcardFolderPath("datauza") - .withWildcardFileName("datakuthooa") - .withEnablePartitionDiscovery("datamszcwwvbxvibiv") - .withPartitionRootPath("dataryppmmtqxqiqae") - .withDeleteFilesAfterCompletion("datagqfcoraj") - .withFileListPath("dataglkffgyfe") - .withUseBinaryTransfer("dataseceuvyn") - .withDisableChunking("databzcufxfyzqrs"); - model = BinaryData.fromObject(model).toObject(FtpReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java deleted file mode 100644 index c2d025aeb3b2..000000000000 
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/FtpServerLocationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FtpServerLocation; - -public final class FtpServerLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - FtpServerLocation model = BinaryData.fromString( - "{\"type\":\"FtpServerLocation\",\"folderPath\":\"datakgtlzl\",\"fileName\":\"datarlxcznnhz\",\"\":{\"tygeqzus\":\"databmxlxm\",\"reesrfwsszvl\":\"datatoqcahfsgbjm\"}}") - .toObject(FtpServerLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - FtpServerLocation model = new FtpServerLocation().withFolderPath("datakgtlzl").withFileName("datarlxcznnhz"); - model = BinaryData.fromObject(model).toObject(FtpServerLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java deleted file mode 100644 index a52d0e2ab852..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GenericDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GenericDatasetTypeProperties; - -public final class GenericDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GenericDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"dataewy\"}").toObject(GenericDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GenericDatasetTypeProperties model = new GenericDatasetTypeProperties().withTableName("dataewy"); - model = BinaryData.fromObject(model).toObject(GenericDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java deleted file mode 100644 index 36878fad39be..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetDataFactoryOperationStatusResponseTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GetDataFactoryOperationStatusResponse; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GetDataFactoryOperationStatusResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GetDataFactoryOperationStatusResponse model - = BinaryData.fromString("{\"status\":\"n\",\"\":{\"pusllywpv\":\"dataqjrhuzgfxonj\"}}") - .toObject(GetDataFactoryOperationStatusResponse.class); - Assertions.assertEquals("n", model.status()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GetDataFactoryOperationStatusResponse model - = new GetDataFactoryOperationStatusResponse().withStatus("n").withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(GetDataFactoryOperationStatusResponse.class); - Assertions.assertEquals("n", model.status()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java deleted file mode 100644 index 7f3ea4ed81b6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTests.java +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.FormatReadSettings; -import com.azure.resourcemanager.datafactory.models.GetMetadataActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GetMetadataActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GetMetadataActivity model = BinaryData.fromString( - 
"{\"type\":\"GetMetadata\",\"typeProperties\":{\"dataset\":{\"referenceName\":\"cth\",\"parameters\":{\"wqpgbticni\":\"datarxmxqskemtajjfm\",\"vwalhawoptiq\":\"dataubocmjiib\",\"avtapcxsmap\":\"datau\",\"kuemcbtumtnrcv\":\"datadmmwylrvztaelpux\"}},\"fieldList\":[\"datayqexujlleweeg\",\"dataqbsythycdckcpfom\",\"datagfwxthrcmgsimgo\"],\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datahjgckkbn\",\"disableMetricsCollection\":\"dataenyehmwzgf\",\"\":{\"chzuap\":\"dataeolorosahg\",\"idyjffpu\":\"datahfhuuizyeyfki\",\"eymlctnnsjcuf\":\"dataykyvbp\",\"glxhb\":\"datajvaxuvazzptldaa\"}},\"formatSettings\":{\"type\":\"FormatReadSettings\",\"\":{\"htykebtvn\":\"datawinle\",\"riehooxqkc\":\"datadcclpbhntoiviue\",\"kyiqjtx\":\"datayydtnl\"}}},\"linkedServiceName\":{\"referenceName\":\"grf\",\"parameters\":{\"oczsrypfviiwjjqp\":\"datakjotvhiv\"}},\"policy\":{\"timeout\":\"dataxkeygmqnuyusnh\",\"retry\":\"dataekhfdlbcucwfc\",\"retryIntervalInSeconds\":420951717,\"secureInput\":true,\"secureOutput\":true,\"\":{\"pgllsrran\":\"dataldavozmibtkvf\",\"rro\":\"dataxxwt\",\"yldtt\":\"datalkgzczjwizrulrk\",\"myc\":\"datagcpqmkpobenaahdj\"}},\"name\":\"tvpeirhstwpbvw\",\"description\":\"hpphjimoecqpqk\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"zxdlrjspxoty\",\"dependencyConditions\":[\"Failed\"],\"\":{\"llcdqvunvnggqacf\":\"datafejiurldsft\",\"dzruuscbs\":\"dataruwqbe\"}},{\"activity\":\"tjdioevifzqqs\",\"dependencyConditions\":[\"Failed\"],\"\":{\"jxsofsiritp\":\"datappphwvduuzpiooa\",\"nrl\":\"dataqp\",\"v\":\"dataoux\",\"cjnfyubc\":\"dataz\"}},{\"activity\":\"wnfnqqnumpnav\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"nbofeucctppbgzf\":\"datafpvbsllyoriad\"}}],\"userProperties\":[{\"name\":\"lsk\",\"value\":\"datavvwd\"},{\"name\":\"trqsobusurxv\",\"value\":\"datadxlbsnskcksf\"},{\"name\":\"tknywxpmef\",\"value\":\"datanccbvchozkmifyxd\"}],\"\":{\"roidhbulvkis\":\"datab
isfnbtqdrkwri\"}}") - .toObject(GetMetadataActivity.class); - Assertions.assertEquals("tvpeirhstwpbvw", model.name()); - Assertions.assertEquals("hpphjimoecqpqk", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("zxdlrjspxoty", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lsk", model.userProperties().get(0).name()); - Assertions.assertEquals("grf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(420951717, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("cth", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GetMetadataActivity model = new GetMetadataActivity().withName("tvpeirhstwpbvw") - .withDescription("hpphjimoecqpqk") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("zxdlrjspxoty") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tjdioevifzqqs") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wnfnqqnumpnav") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("lsk").withValue("datavvwd"), - new 
UserProperty().withName("trqsobusurxv").withValue("datadxlbsnskcksf"), - new UserProperty().withName("tknywxpmef").withValue("datanccbvchozkmifyxd"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("grf") - .withParameters(mapOf("oczsrypfviiwjjqp", "datakjotvhiv"))) - .withPolicy(new ActivityPolicy().withTimeout("dataxkeygmqnuyusnh") - .withRetry("dataekhfdlbcucwfc") - .withRetryIntervalInSeconds(420951717) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withDataset(new DatasetReference().withReferenceName("cth") - .withParameters(mapOf("wqpgbticni", "datarxmxqskemtajjfm", "vwalhawoptiq", "dataubocmjiib", - "avtapcxsmap", "datau", "kuemcbtumtnrcv", "datadmmwylrvztaelpux"))) - .withFieldList(Arrays.asList("datayqexujlleweeg", "dataqbsythycdckcpfom", "datagfwxthrcmgsimgo")) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datahjgckkbn") - .withDisableMetricsCollection("dataenyehmwzgf") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings"))); - model = BinaryData.fromObject(model).toObject(GetMetadataActivity.class); - Assertions.assertEquals("tvpeirhstwpbvw", model.name()); - Assertions.assertEquals("hpphjimoecqpqk", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("zxdlrjspxoty", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lsk", model.userProperties().get(0).name()); - Assertions.assertEquals("grf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(420951717, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, 
model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("cth", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java deleted file mode 100644 index 6e8e4a2ff42e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetMetadataActivityTypePropertiesTests.java +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GetMetadataActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.FormatReadSettings; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GetMetadataActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GetMetadataActivityTypeProperties model = BinaryData.fromString( - "{\"dataset\":{\"referenceName\":\"hnfqnekpxddde\",\"parameters\":{\"lq\":\"datagdjahnsmktk\"}},\"fieldList\":[\"datadolobtzrgxnlaur\"],\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datantcbl\",\"disableMetricsCollection\":\"datazoqtfbjk\",\"\":{\"axttoenfohipijfy\":\"datatelblbungrkjb\",\"ecktcwgnkxjd\":\"datammqzbznrjw\",\"ndz\":\"dataxdi\"}},\"formatSettings\":{\"type\":\"FormatReadSettings\",\"\":{\"mkeaw\":\"datapiaklefwaiabfnt\",\"ljbnfw\":\"datafeudcg\",\"bpgskgpwspxhhnv\":\"dataffnngiu\",\"fstizemakgzcmbg\":\"datapzjtiktgmdlw\"}}}") - .toObject(GetMetadataActivityTypeProperties.class); - Assertions.assertEquals("hnfqnekpxddde", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GetMetadataActivityTypeProperties model = new GetMetadataActivityTypeProperties() - .withDataset(new DatasetReference().withReferenceName("hnfqnekpxddde") - .withParameters(mapOf("lq", "datagdjahnsmktk"))) - .withFieldList(Arrays.asList("datadolobtzrgxnlaur")) - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datantcbl") - .withDisableMetricsCollection("datazoqtfbjk") - .withAdditionalProperties(mapOf("type", 
"StoreReadSettings"))) - .withFormatSettings(new FormatReadSettings().withAdditionalProperties(mapOf("type", "FormatReadSettings"))); - model = BinaryData.fromObject(model).toObject(GetMetadataActivityTypeProperties.class); - Assertions.assertEquals("hnfqnekpxddde", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java deleted file mode 100644 index f13cc9b45f10..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GetSsisObjectMetadataRequestTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest; -import org.junit.jupiter.api.Assertions; - -public final class GetSsisObjectMetadataRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GetSsisObjectMetadataRequest model = BinaryData.fromString("{\"metadataPath\":\"jriplrbpbewtghf\"}") - .toObject(GetSsisObjectMetadataRequest.class); - Assertions.assertEquals("jriplrbpbewtghf", model.metadataPath()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GetSsisObjectMetadataRequest model = new GetSsisObjectMetadataRequest().withMetadataPath("jriplrbpbewtghf"); - model = BinaryData.fromObject(model).toObject(GetSsisObjectMetadataRequest.class); - Assertions.assertEquals("jriplrbpbewtghf", model.metadataPath()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java deleted file mode 100644 index fed189ed4213..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterListResponseTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GlobalParameterResourceInner; -import com.azure.resourcemanager.datafactory.models.GlobalParameterListResponse; -import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GlobalParameterListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GlobalParameterListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"nuilee\":{\"type\":\"Array\",\"value\":\"datamyqwcab\"},\"rxwtoaukhfkvc\":{\"type\":\"Object\",\"value\":\"dataswlpaugmrmfj\"},\"jwuive\":{\"type\":\"Int\",\"value\":\"dataizmoaeds\"},\"xeiqbpsmg\":{\"type\":\"String\",\"value\":\"datacgyee\"}},\"name\":\"guamlj\",\"type\":\"rgmsplzga\",\"etag\":\"cshhv\",\"id\":\"wgnxkympqanxrj\"},{\"properties\":{\"taoypnyghshxc\":{\"type\":\"Bool\",\"value\":\"datatw\"}},\"name\":\"hkgmnsg\",\"type\":\"xycphdrwjjkh\",\"etag\":\"omacluzvxnqmhr\",\"id\":\"pd\"}],\"nextLink\":\"mkoisqcssf\"}") - .toObject(GlobalParameterListResponse.class); - Assertions.assertEquals("wgnxkympqanxrj", model.value().get(0).id()); - Assertions.assertEquals(GlobalParameterType.ARRAY, model.value().get(0).properties().get("nuilee").type()); - Assertions.assertEquals("mkoisqcssf", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GlobalParameterListResponse model = new GlobalParameterListResponse() - .withValue(Arrays.asList( - new GlobalParameterResourceInner().withId("wgnxkympqanxrj") - .withProperties(mapOf("nuilee", - new GlobalParameterSpecification().withType(GlobalParameterType.ARRAY).withValue("datamyqwcab"), - 
"rxwtoaukhfkvc", - new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT) - .withValue("dataswlpaugmrmfj"), - "jwuive", - new GlobalParameterSpecification().withType(GlobalParameterType.INT).withValue("dataizmoaeds"), - "xeiqbpsmg", - new GlobalParameterSpecification().withType(GlobalParameterType.STRING) - .withValue("datacgyee"))), - new GlobalParameterResourceInner().withId("pd") - .withProperties(mapOf("taoypnyghshxc", - new GlobalParameterSpecification().withType(GlobalParameterType.BOOL).withValue("datatw"))))) - .withNextLink("mkoisqcssf"); - model = BinaryData.fromObject(model).toObject(GlobalParameterListResponse.class); - Assertions.assertEquals("wgnxkympqanxrj", model.value().get(0).id()); - Assertions.assertEquals(GlobalParameterType.ARRAY, model.value().get(0).properties().get("nuilee").type()); - Assertions.assertEquals("mkoisqcssf", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java deleted file mode 100644 index 208aa7705149..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterResourceInnerTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GlobalParameterResourceInner; -import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GlobalParameterResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GlobalParameterResourceInner model = BinaryData.fromString( - "{\"properties\":{\"zsylollgt\":{\"type\":\"String\",\"value\":\"dataifmcsypobkdqzr\"},\"rkihcirld\":{\"type\":\"Float\",\"value\":\"datazzydmxzjijpvua\"},\"urnnqbnqbpiz\":{\"type\":\"Object\",\"value\":\"dataxrdcoxnbkkja\"},\"fihwu\":{\"type\":\"Float\",\"value\":\"dataltgrdogypxrxv\"}},\"name\":\"ctafsrbxrblm\",\"type\":\"owxih\",\"etag\":\"nxw\",\"id\":\"gnepz\"}") - .toObject(GlobalParameterResourceInner.class); - Assertions.assertEquals("gnepz", model.id()); - Assertions.assertEquals(GlobalParameterType.STRING, model.properties().get("zsylollgt").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GlobalParameterResourceInner model = new GlobalParameterResourceInner().withId("gnepz") - .withProperties(mapOf("zsylollgt", - new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("dataifmcsypobkdqzr"), - "rkihcirld", - new GlobalParameterSpecification().withType(GlobalParameterType.FLOAT).withValue("datazzydmxzjijpvua"), - "urnnqbnqbpiz", - new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT).withValue("dataxrdcoxnbkkja"), - "fihwu", - new GlobalParameterSpecification().withType(GlobalParameterType.FLOAT).withValue("dataltgrdogypxrxv"))); - model = BinaryData.fromObject(model).toObject(GlobalParameterResourceInner.class); - 
Assertions.assertEquals("gnepz", model.id()); - Assertions.assertEquals(GlobalParameterType.STRING, model.properties().get("zsylollgt").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java deleted file mode 100644 index 4c2a63edd827..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParameterSpecificationTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import org.junit.jupiter.api.Assertions; - -public final class GlobalParameterSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GlobalParameterSpecification model = BinaryData.fromString("{\"type\":\"String\",\"value\":\"datarm\"}") - .toObject(GlobalParameterSpecification.class); - Assertions.assertEquals(GlobalParameterType.STRING, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GlobalParameterSpecification model - = new GlobalParameterSpecification().withType(GlobalParameterType.STRING).withValue("datarm"); - model = BinaryData.fromObject(model).toObject(GlobalParameterSpecification.class); - Assertions.assertEquals(GlobalParameterType.STRING, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 5196f16acd89..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; -import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class GlobalParametersCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"rzmkte\":{\"type\":\"Object\",\"value\":\"datatgv\"}},\"name\":\"euxxtslhjcwlfz\",\"type\":\"pwexcktgpcccg\",\"etag\":\"knjjskzuh\",\"id\":\"yavfeyybyduy\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - GlobalParameterResource response = manager.globalParameters() - .define("z") - .withExistingFactory("zhprlx", "bm") - .withProperties(mapOf("wv", - new GlobalParameterSpecification().withType(GlobalParameterType.OBJECT).withValue("datajehsv"), "lcstu", - new GlobalParameterSpecification().withType(GlobalParameterType.BOOL) - .withValue("datancifdxtibvqrhzpf"))) - .create(); - - 
Assertions.assertEquals("yavfeyybyduy", response.id()); - Assertions.assertEquals(GlobalParameterType.OBJECT, response.properties().get("rzmkte").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java deleted file mode 100644 index 8b764ff0a27a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class GlobalParametersDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.globalParameters() - .deleteWithResponse("lff", "fjskndwywbptvym", "mpdcddbe", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java deleted file mode 100644 index 2a363d2b068d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersGetWithResponseMockTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class GlobalParametersGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"jwqx\":{\"type\":\"Array\",\"value\":\"datadzswvfwiu\"},\"wvbiryxsaxwu\":{\"type\":\"Object\",\"value\":\"datatfzgdq\"},\"ofakmopqfzvvtif\":{\"type\":\"Object\",\"value\":\"datansesxwkhkcd\"},\"vbpnrfucxtmhm\":{\"type\":\"Int\",\"value\":\"datasuemewfut\"}},\"name\":\"npsdpf\",\"type\":\"cwwbunfymbwinur\",\"etag\":\"t\",\"id\":\"jokttqgokhajuylk\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - GlobalParameterResource response = manager.globalParameters() - .getWithResponse("tsolxnhlrpsign", "is", "zobpxfgp", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("jokttqgokhajuylk", response.id()); - Assertions.assertEquals(GlobalParameterType.ARRAY, response.properties().get("jwqx").type()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java deleted file mode 100644 index e0bc3f91dbed..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GlobalParametersListByFactoryMockTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.GlobalParameterResource; -import com.azure.resourcemanager.datafactory.models.GlobalParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class GlobalParametersListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"sxhiuhgvgnolu\":{\"type\":\"Object\",\"value\":\"datalvwbgbmpit\"}},\"name\":\"fdofnp\",\"type\":\"v\",\"etag\":\"ymkguvrdqnproyt\",\"id\":\"nelqcvmvpp\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = 
DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.globalParameters().listByFactory("rhwdom", "ythsl", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("nelqcvmvpp", response.iterator().next().id()); - Assertions.assertEquals(GlobalParameterType.OBJECT, - response.iterator().next().properties().get("sxhiuhgvgnolu").type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java deleted file mode 100644 index b24658332a71..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.GoogleAdWordsObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GoogleAdWordsObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleAdWordsObjectDataset model = BinaryData.fromString( - "{\"type\":\"GoogleAdWordsObject\",\"typeProperties\":{\"tableName\":\"datamhp\"},\"description\":\"sfgvrvq\",\"structure\":\"datawbdrwroqkljnzpqh\",\"schema\":\"datasarkyulfa\",\"linkedServiceName\":{\"referenceName\":\"ea\",\"parameters\":{\"geytlplslfc\":\"dataqenhekzaz\",\"ksuowt\":\"datae\",\"rhnxzmfvmw\":\"datalkyqfnjo\",\"rawwhyxf\":\"datanrtc\"}},\"parameters\":{\"uns\":{\"type\":\"String\",\"defaultValue\":\"datadmvwn\"}},\"annotations\":[\"dataevzshqykebmps\",\"dataaezc\",\"datadkckr\"],\"folder\":{\"name\":\"qdmhcejstfs\"},\"\":{\"wxqd\":\"datajakgk\",\"wdjox\":\"dataoqzh\",\"sobvcnsb\":\"datakbd\"}}") - .toObject(GoogleAdWordsObjectDataset.class); - Assertions.assertEquals("sfgvrvq", model.description()); - Assertions.assertEquals("ea", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("uns").type()); - Assertions.assertEquals("qdmhcejstfs", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleAdWordsObjectDataset model = new GoogleAdWordsObjectDataset().withDescription("sfgvrvq") - .withStructure("datawbdrwroqkljnzpqh") - .withSchema("datasarkyulfa") 
- .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ea") - .withParameters(mapOf("geytlplslfc", "dataqenhekzaz", "ksuowt", "datae", "rhnxzmfvmw", "datalkyqfnjo", - "rawwhyxf", "datanrtc"))) - .withParameters( - mapOf("uns", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datadmvwn"))) - .withAnnotations(Arrays.asList("dataevzshqykebmps", "dataaezc", "datadkckr")) - .withFolder(new DatasetFolder().withName("qdmhcejstfs")) - .withTableName("datamhp"); - model = BinaryData.fromObject(model).toObject(GoogleAdWordsObjectDataset.class); - Assertions.assertEquals("sfgvrvq", model.description()); - Assertions.assertEquals("ea", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("uns").type()); - Assertions.assertEquals("qdmhcejstfs", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java deleted file mode 100644 index 938f1fb1964c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleAdWordsSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GoogleAdWordsSource; - -public final class GoogleAdWordsSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleAdWordsSource model = BinaryData.fromString( - "{\"type\":\"GoogleAdWordsSource\",\"query\":\"datasclpnbidnlodk\",\"queryTimeout\":\"dataqnkptixa\",\"additionalColumns\":\"datay\",\"sourceRetryCount\":\"dataaevry\",\"sourceRetryWait\":\"datagccpzmh\",\"maxConcurrentConnections\":\"datalqtzgtpsbym\",\"disableMetricsCollection\":\"datactorqzbq\",\"\":{\"ahbynlbwcnnfp\":\"datafqqrarolc\",\"lehgeeygsojtza\":\"datagstdifbyfj\",\"dsjhx\":\"dataliigrtvqv\"}}") - .toObject(GoogleAdWordsSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleAdWordsSource model = new GoogleAdWordsSource().withSourceRetryCount("dataaevry") - .withSourceRetryWait("datagccpzmh") - .withMaxConcurrentConnections("datalqtzgtpsbym") - .withDisableMetricsCollection("datactorqzbq") - .withQueryTimeout("dataqnkptixa") - .withAdditionalColumns("datay") - .withQuery("datasclpnbidnlodk"); - model = BinaryData.fromObject(model).toObject(GoogleAdWordsSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java deleted file mode 100644 index bce29aaaa7f9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryDatasetTypeProperties; - -public final class GoogleBigQueryDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQueryDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datashennmsgpywdib\",\"table\":\"datavnrgalv\",\"dataset\":\"datahry\"}") - .toObject(GoogleBigQueryDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQueryDatasetTypeProperties model - = new GoogleBigQueryDatasetTypeProperties().withTableName("datashennmsgpywdib") - .withTable("datavnrgalv") - .withDataset("datahry"); - model = BinaryData.fromObject(model).toObject(GoogleBigQueryDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java deleted file mode 100644 index f41ea83185c3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.GoogleBigQueryObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GoogleBigQueryObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQueryObjectDataset model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryObject\",\"typeProperties\":{\"tableName\":\"databldpoiaffjkrtn\",\"table\":\"dataevimxmaxcj\",\"dataset\":\"dataitygvdwds\"},\"description\":\"tb\",\"structure\":\"datav\",\"schema\":\"datazbzchnqekwan\",\"linkedServiceName\":{\"referenceName\":\"lpurlcydjhtk\",\"parameters\":{\"lr\":\"datarwiyndurdonkgobx\"}},\"parameters\":{\"froefq\":{\"type\":\"SecureString\",\"defaultValue\":\"datarswknpdrgnmza\"}},\"annotations\":[\"dataevyrejyoybkqftu\"],\"folder\":{\"name\":\"m\"},\"\":{\"wsicvwqzoc\":\"datavvbucnnrovomep\"}}") - .toObject(GoogleBigQueryObjectDataset.class); - Assertions.assertEquals("tb", model.description()); - Assertions.assertEquals("lpurlcydjhtk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("froefq").type()); - Assertions.assertEquals("m", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQueryObjectDataset model = new GoogleBigQueryObjectDataset().withDescription("tb") - .withStructure("datav") - .withSchema("datazbzchnqekwan") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("lpurlcydjhtk") - .withParameters(mapOf("lr", "datarwiyndurdonkgobx"))) - .withParameters(mapOf("froefq", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datarswknpdrgnmza"))) - .withAnnotations(Arrays.asList("dataevyrejyoybkqftu")) - .withFolder(new DatasetFolder().withName("m")) - .withTableName("databldpoiaffjkrtn") - .withTable("dataevimxmaxcj") - .withDataset("dataitygvdwds"); - model = BinaryData.fromObject(model).toObject(GoogleBigQueryObjectDataset.class); - Assertions.assertEquals("tb", model.description()); - Assertions.assertEquals("lpurlcydjhtk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("froefq").type()); - Assertions.assertEquals("m", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java deleted file mode 100644 index 1ddcaf649caa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQuerySourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GoogleBigQuerySource; - -public final class GoogleBigQuerySourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQuerySource model = BinaryData.fromString( - "{\"type\":\"GoogleBigQuerySource\",\"query\":\"datawaejxzkqcmddc\",\"queryTimeout\":\"datanxyr\",\"additionalColumns\":\"dataegabsfjrjzdqscgo\",\"sourceRetryCount\":\"datagd\",\"sourceRetryWait\":\"dataepgfrb\",\"maxConcurrentConnections\":\"dataoeh\",\"disableMetricsCollection\":\"datawwsgqziwo\",\"\":{\"okckxfk\":\"datawjssyazmmbuxq\",\"qf\":\"datatqkbyruheawuc\",\"jguwts\":\"datarbtbogxlyvebv\"}}") - .toObject(GoogleBigQuerySource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQuerySource model = new GoogleBigQuerySource().withSourceRetryCount("datagd") - .withSourceRetryWait("dataepgfrb") - .withMaxConcurrentConnections("dataoeh") - .withDisableMetricsCollection("datawwsgqziwo") - .withQueryTimeout("datanxyr") - .withAdditionalColumns("dataegabsfjrjzdqscgo") - .withQuery("datawaejxzkqcmddc"); - model = BinaryData.fromObject(model).toObject(GoogleBigQuerySource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java deleted file mode 100644 index b67238f0a0c1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2DatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GoogleBigQueryV2DatasetTypeProperties; - -public final class GoogleBigQueryV2DatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQueryV2DatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"dataxnguwn\",\"dataset\":\"datapu\"}") - .toObject(GoogleBigQueryV2DatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQueryV2DatasetTypeProperties model - = new GoogleBigQueryV2DatasetTypeProperties().withTable("dataxnguwn").withDataset("datapu"); - model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2DatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java deleted file mode 100644 index e72b5f689d3f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2ObjectDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2ObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GoogleBigQueryV2ObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQueryV2ObjectDataset model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryV2Object\",\"typeProperties\":{\"table\":\"databknuubxcw\",\"dataset\":\"datatupqdvnpyeevff\"},\"description\":\"ujgtdowlxmwefcb\",\"structure\":\"datapchrtczwjcujy\",\"schema\":\"datavyrjqdjlgk\",\"linkedServiceName\":{\"referenceName\":\"i\",\"parameters\":{\"ircvnfgbdv\":\"dataeuwi\",\"jnyexbvxgxqq\":\"datawxohktxagfujdb\",\"tpvev\":\"dataasfeooq\",\"klqlii\":\"dataarp\"}},\"parameters\":{\"dgsebjuymtevae\":{\"type\":\"Array\",\"defaultValue\":\"datawgjnofgij\"},\"hrjkejvaedogz\":{\"type\":\"SecureString\",\"defaultValue\":\"datawy\"},\"tenfdvdoearywusr\":{\"type\":\"Object\",\"defaultValue\":\"dataxbxxgjogcphivfhr\"}},\"annotations\":[\"datadt\",\"datasyfezfsmyljd\",\"datayyrwnmwtqi\"],\"folder\":{\"name\":\"nnkynkstd\"},\"\":{\"wvaosckfavhk\":\"datahjfphfxaqjyihjc\",\"weifdyfa\":\"datapsp\"}}") - .toObject(GoogleBigQueryV2ObjectDataset.class); - Assertions.assertEquals("ujgtdowlxmwefcb", model.description()); - Assertions.assertEquals("i", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("dgsebjuymtevae").type()); - Assertions.assertEquals("nnkynkstd", model.folder().name()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQueryV2ObjectDataset model = new GoogleBigQueryV2ObjectDataset().withDescription("ujgtdowlxmwefcb") - .withStructure("datapchrtczwjcujy") - .withSchema("datavyrjqdjlgk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("i") - .withParameters(mapOf("ircvnfgbdv", "dataeuwi", "jnyexbvxgxqq", "datawxohktxagfujdb", "tpvev", - "dataasfeooq", "klqlii", "dataarp"))) - .withParameters(mapOf("dgsebjuymtevae", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datawgjnofgij"), - "hrjkejvaedogz", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datawy"), - "tenfdvdoearywusr", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataxbxxgjogcphivfhr"))) - .withAnnotations(Arrays.asList("datadt", "datasyfezfsmyljd", "datayyrwnmwtqi")) - .withFolder(new DatasetFolder().withName("nnkynkstd")) - .withTable("databknuubxcw") - .withDataset("datatupqdvnpyeevff"); - model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2ObjectDataset.class); - Assertions.assertEquals("ujgtdowlxmwefcb", model.description()); - Assertions.assertEquals("i", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("dgsebjuymtevae").type()); - Assertions.assertEquals("nnkynkstd", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java deleted file mode 100644 index 1b976e1a56dc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleBigQueryV2SourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GoogleBigQueryV2Source; - -public final class GoogleBigQueryV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleBigQueryV2Source model = BinaryData.fromString( - "{\"type\":\"GoogleBigQueryV2Source\",\"query\":\"datajwiz\",\"queryTimeout\":\"dataifz\",\"additionalColumns\":\"dataxtykjrdxlximvr\",\"sourceRetryCount\":\"datajja\",\"sourceRetryWait\":\"dataaskullvtsauj\",\"maxConcurrentConnections\":\"datahtz\",\"disableMetricsCollection\":\"datazqrpfhzxkjyg\",\"\":{\"jcozbnmthxcm\":\"datadgwdha\",\"exn\":\"dataq\",\"msmzykpnjgi\":\"datapvox\"}}") - .toObject(GoogleBigQueryV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleBigQueryV2Source model = new GoogleBigQueryV2Source().withSourceRetryCount("datajja") - .withSourceRetryWait("dataaskullvtsauj") - .withMaxConcurrentConnections("datahtz") - 
.withDisableMetricsCollection("datazqrpfhzxkjyg") - .withQueryTimeout("dataifz") - .withAdditionalColumns("dataxtykjrdxlximvr") - .withQuery("datajwiz"); - model = BinaryData.fromObject(model).toObject(GoogleBigQueryV2Source.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java deleted file mode 100644 index 2d2787921ff4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageLocationTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GoogleCloudStorageLocation; - -public final class GoogleCloudStorageLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleCloudStorageLocation model = BinaryData.fromString( - "{\"type\":\"GoogleCloudStorageLocation\",\"bucketName\":\"datarulcfogx\",\"version\":\"dataxnwjtpfdzxcouz\",\"folderPath\":\"dataofwakukzk\",\"fileName\":\"datazxsoednl\",\"\":{\"isnionetbzdrdpue\":\"dataihezomucmq\"}}") - .toObject(GoogleCloudStorageLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleCloudStorageLocation model = new GoogleCloudStorageLocation().withFolderPath("dataofwakukzk") - .withFileName("datazxsoednl") - .withBucketName("datarulcfogx") - .withVersion("dataxnwjtpfdzxcouz"); - model = BinaryData.fromObject(model).toObject(GoogleCloudStorageLocation.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java deleted file mode 100644 index 74cd4d9051da..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GoogleCloudStorageReadSettingsTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GoogleCloudStorageReadSettings; - -public final class GoogleCloudStorageReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GoogleCloudStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"GoogleCloudStorageReadSettings\",\"recursive\":\"dataxjtu\",\"wildcardFolderPath\":\"datadjwszxmc\",\"wildcardFileName\":\"datany\",\"prefix\":\"dataadvfqrv\",\"fileListPath\":\"datamncwjdmjdph\",\"enablePartitionDiscovery\":\"datapjrbnzbe\",\"partitionRootPath\":\"dataqbfimageiqjn\",\"deleteFilesAfterCompletion\":\"datarhxsktu\",\"modifiedDatetimeStart\":\"datapjmevsz\",\"modifiedDatetimeEnd\":\"datanqktokcjckmzbh\",\"maxConcurrentConnections\":\"datadxnhinnkakqw\",\"disableMetricsCollection\":\"datazxltjsfxxkgx\",\"\":{\"hhtfmhma\":\"datanocrbtazop\",\"xcrh\":\"datanppucfvxqr\",\"umiuxxmrntphyj\":\"dataecnv\",\"a\":\"dataeddtkpys\"}}") - .toObject(GoogleCloudStorageReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GoogleCloudStorageReadSettings model - = new 
GoogleCloudStorageReadSettings().withMaxConcurrentConnections("datadxnhinnkakqw") - .withDisableMetricsCollection("datazxltjsfxxkgx") - .withRecursive("dataxjtu") - .withWildcardFolderPath("datadjwszxmc") - .withWildcardFileName("datany") - .withPrefix("dataadvfqrv") - .withFileListPath("datamncwjdmjdph") - .withEnablePartitionDiscovery("datapjrbnzbe") - .withPartitionRootPath("dataqbfimageiqjn") - .withDeleteFilesAfterCompletion("datarhxsktu") - .withModifiedDatetimeStart("datapjmevsz") - .withModifiedDatetimeEnd("datanqktokcjckmzbh"); - model = BinaryData.fromObject(model).toObject(GoogleCloudStorageReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java deleted file mode 100644 index aa0e9bb541c6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.GreenplumDatasetTypeProperties; - -public final class GreenplumDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GreenplumDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"dataemzcyniapypimrx\",\"table\":\"dataqwipzesstuin\",\"schema\":\"datakmlf\"}") - .toObject(GreenplumDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GreenplumDatasetTypeProperties model = new GreenplumDatasetTypeProperties().withTableName("dataemzcyniapypimrx") - .withTable("dataqwipzesstuin") - .withSchema("datakmlf"); - model = BinaryData.fromObject(model).toObject(GreenplumDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java deleted file mode 100644 index 46959ee1e62c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.GreenplumSource; - -public final class GreenplumSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GreenplumSource model = BinaryData.fromString( - "{\"type\":\"GreenplumSource\",\"query\":\"datauztb\",\"queryTimeout\":\"datatfmcnrgwgcsto\",\"additionalColumns\":\"dataveehmvr\",\"sourceRetryCount\":\"dataurpzry\",\"sourceRetryWait\":\"datafdhch\",\"maxConcurrentConnections\":\"datawahaxyrdlvb\",\"disableMetricsCollection\":\"datahfqsjzlckt\",\"\":{\"yhxgnlpjytle\":\"dataxtee\",\"krcohhuwzun\":\"dataymijhn\",\"b\":\"datazbdeyhwebh\",\"lynd\":\"dataocfvajmmdmb\"}}") - .toObject(GreenplumSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GreenplumSource model = new GreenplumSource().withSourceRetryCount("dataurpzry") - .withSourceRetryWait("datafdhch") - .withMaxConcurrentConnections("datawahaxyrdlvb") - .withDisableMetricsCollection("datahfqsjzlckt") - .withQueryTimeout("datatfmcnrgwgcsto") - .withAdditionalColumns("dataveehmvr") - .withQuery("datauztb"); - model = BinaryData.fromObject(model).toObject(GreenplumSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java deleted file mode 100644 index b4748c409a19..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/GreenplumTableDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.GreenplumTableDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class GreenplumTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GreenplumTableDataset model = BinaryData.fromString( - "{\"type\":\"GreenplumTable\",\"typeProperties\":{\"tableName\":\"dataihsujtg\",\"table\":\"databszam\",\"schema\":\"dataxejpd\"},\"description\":\"iqwzutiyelrn\",\"structure\":\"datajt\",\"schema\":\"dataqsznsyovqm\",\"linkedServiceName\":{\"referenceName\":\"cudptoqwrnf\",\"parameters\":{\"kngejjxumowy\":\"datathlokmxwawfu\",\"drsjtmn\":\"datajmoozmxuk\"}},\"parameters\":{\"zcfdtstiaxtyrnu\":{\"type\":\"Bool\",\"defaultValue\":\"dataw\"},\"cmlroiommemso\":{\"type\":\"SecureString\",\"defaultValue\":\"datahepisq\"},\"ckcesrsi\":{\"type\":\"Bool\",\"defaultValue\":\"datalcyeqdobobaqcabe\"}},\"annotations\":[\"datalpjcxbjgfmyqyyfr\"],\"folder\":{\"name\":\"zfpsf\"},\"\":{\"qafz\":\"dataidfhmlxrqkeknum\",\"p\":\"dataptriysjrgtruwpu\",\"vvoydwedggwg\":\"datarcek\",\"oarsrdr\":\"datalvbwatzadrjbjn\"}}") - .toObject(GreenplumTableDataset.class); - Assertions.assertEquals("iqwzutiyelrn", model.description()); - Assertions.assertEquals("cudptoqwrnf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zcfdtstiaxtyrnu").type()); - Assertions.assertEquals("zfpsf", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() 
throws Exception { - GreenplumTableDataset model = new GreenplumTableDataset().withDescription("iqwzutiyelrn") - .withStructure("datajt") - .withSchema("dataqsznsyovqm") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cudptoqwrnf") - .withParameters(mapOf("kngejjxumowy", "datathlokmxwawfu", "drsjtmn", "datajmoozmxuk"))) - .withParameters(mapOf("zcfdtstiaxtyrnu", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataw"), "cmlroiommemso", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahepisq"), - "ckcesrsi", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalcyeqdobobaqcabe"))) - .withAnnotations(Arrays.asList("datalpjcxbjgfmyqyyfr")) - .withFolder(new DatasetFolder().withName("zfpsf")) - .withTableName("dataihsujtg") - .withTable("databszam") - .withSchemaTypePropertiesSchema("dataxejpd"); - model = BinaryData.fromObject(model).toObject(GreenplumTableDataset.class); - Assertions.assertEquals("iqwzutiyelrn", model.description()); - Assertions.assertEquals("cudptoqwrnf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zcfdtstiaxtyrnu").type()); - Assertions.assertEquals("zfpsf", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java deleted file mode 100644 index 58c7f7383e32..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.HBaseObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HBaseObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HBaseObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"HBaseObject\",\"typeProperties\":{\"tableName\":\"datazcxxvzjoyxjgahx\"},\"description\":\"rr\",\"structure\":\"datakt\",\"schema\":\"datatkbcer\",\"linkedServiceName\":{\"referenceName\":\"cvcp\",\"parameters\":{\"kmpwyvlhnh\":\"dataj\"}},\"parameters\":{\"lrcygotohzwto\":{\"type\":\"Int\",\"defaultValue\":\"datalebgjgylsacagi\"},\"jzelsriemvupmea\":{\"type\":\"Float\",\"defaultValue\":\"databxitrapwzhlutj\"},\"vldeehcbsaip\":{\"type\":\"Float\",\"defaultValue\":\"dataysy\"}},\"annotations\":[\"dataofkegbvbbdledffl\",\"datavsluazzxfjv\"],\"folder\":{\"name\":\"pxzee\"},\"\":{\"qdczmrjgobekx\":\"datapbscboxr\",\"rzzbs\":\"dataheowsecaf\",\"rjsbdbm\":\"datai\",\"vmrfaptndrmmn\":\"dataeesacuicnvqiqsu\"}}") - .toObject(HBaseObjectDataset.class); - Assertions.assertEquals("rr", model.description()); - Assertions.assertEquals("cvcp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("lrcygotohzwto").type()); - Assertions.assertEquals("pxzee", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HBaseObjectDataset model = new HBaseObjectDataset().withDescription("rr") - .withStructure("datakt") - .withSchema("datatkbcer") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("cvcp").withParameters(mapOf("kmpwyvlhnh", "dataj"))) - .withParameters(mapOf("lrcygotohzwto", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalebgjgylsacagi"), - "jzelsriemvupmea", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("databxitrapwzhlutj"), - "vldeehcbsaip", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataysy"))) - .withAnnotations(Arrays.asList("dataofkegbvbbdledffl", "datavsluazzxfjv")) - .withFolder(new DatasetFolder().withName("pxzee")) - .withTableName("datazcxxvzjoyxjgahx"); - model = 
BinaryData.fromObject(model).toObject(HBaseObjectDataset.class); - Assertions.assertEquals("rr", model.description()); - Assertions.assertEquals("cvcp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("lrcygotohzwto").type()); - Assertions.assertEquals("pxzee", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java deleted file mode 100644 index f9ff80d33a99..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HBaseSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HBaseSource; - -public final class HBaseSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HBaseSource model = BinaryData.fromString( - "{\"type\":\"HBaseSource\",\"query\":\"dataujfzxsazuj\",\"queryTimeout\":\"datawwtlerhpfrarqnj\",\"additionalColumns\":\"datahsxhtvnq\",\"sourceRetryCount\":\"datarrgmlw\",\"sourceRetryWait\":\"datam\",\"maxConcurrentConnections\":\"datacsddlcnwbijxf\",\"disableMetricsCollection\":\"datageffrghwdmr\",\"\":{\"vdrggucwa\":\"datarrg\"}}") - .toObject(HBaseSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HBaseSource model = new HBaseSource().withSourceRetryCount("datarrgmlw") - .withSourceRetryWait("datam") - .withMaxConcurrentConnections("datacsddlcnwbijxf") - .withDisableMetricsCollection("datageffrghwdmr") - .withQueryTimeout("datawwtlerhpfrarqnj") - .withAdditionalColumns("datahsxhtvnq") - .withQuery("dataujfzxsazuj"); - model = BinaryData.fromObject(model).toObject(HBaseSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java deleted file mode 100644 index 8edefce169ac..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTests.java +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.HDInsightHiveActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightHiveActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightHiveActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightHive\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"dhjmpn\",\"parameters\":{\"vytfuqzstqbx\":\"dataxshhljtku\"}},{\"referenceName\":\"yf\",\"parameters\":{\"lscokafaqqipv\":\"datajeitkfhzvscndb\"}},{\"referenceName\":\"vdzssssncghgidq\",\"parameters\":{\"mwjn\":\"dataxodbxzha\",\"hnzkmjoybyogwjr\":\"dataootcyyupaqdoo\",\"hxawohsj\":\"datasnryk\",\"yzvrixcveserltlh\":\"datawxphnlw\"}}],\"arguments\":[\"datajuopvkrmspksfxd\",\"datab\",\"datafyxweiqvhfyvkx\",\"dataoxsveiucuxwn\"],\"getDebugInfo\":\"Always\",\"scriptPath\":\"datargqmbnfvyg\",\"scriptLinkedService\":{\"referenceName\":\"dcf\",\"parameters\":{\"vcksznngguucpyt\":\"datapsy\",\"lbcc\":\"dataxnujwffthbziieo\",\"au\":\"datac\"}},\"defines\":{\"gcdx\":\"dataevjr\",\"bvvuqwljmzpyuk\":\"datagsteeksbksvvyvo\"},\"variables\":{\"bkceb\":\"datavhcgtctnqdcg\",\"nqqiqc\":\"datartputmtjsklkw\",\"n\":\"datamfxldqtm\",\"p\":\"dataejnemrfqjhc\"},\"queryTimeout\":635732002},\"linkedServiceName\":{\"referenceName\":\"bfgullqpcijyx\",\"parameters\":{\"ksror\":\"datacg\",\"hltlftr\":\"dataejfhar\"}},\"policy\":{\"timeout\":\"dataxzdujpuhbaog\",\"retry\":\"databkxdhavegysqsm\",\"retryIntervalInSeconds\":1182405642,\"secureInput\":true,\"secureOutput\":true,\"\":{\"qwmhcpujyg\":\"datavndvwwejvqpwyri\",\"cyw\":\"datatxregbssqt\",\"rzjjffliz\":\"dataoqxprsocfx\",\"jfaulwlze\":\"datau\"}},\"name\":\"ygnepj\",\"description\":\"xqdrphiyxjq\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"achpfzsfutaapbrw\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"cavk\":\"dataso\",\"teratvpkgaw\":\"datahyoigzwed\"}}],\"userProperties\":[{\"name\":\"jizdmhepfjdiwz\",\"value\":\"datawmumuc\"},{\"name\":\"vqwhscvaq\",\"value\":\"dataxgelnjgftqkgavgo\"},{\"name\":\"llxpaylkrast\",\"value\":\"datakskkziebm\"}],\"\":{\"brdbwwqtxpfofrf\":\"datadfmplgdxdt\",\"ev\":\"datahbbn\"}}") - .toObject(HDInsightHiveActivity.class); - 
Assertions.assertEquals("ygnepj", model.name()); - Assertions.assertEquals("xqdrphiyxjq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("achpfzsfutaapbrw", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("jizdmhepfjdiwz", model.userProperties().get(0).name()); - Assertions.assertEquals("bfgullqpcijyx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1182405642, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("dhjmpn", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dcf", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(635732002, model.queryTimeout()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightHiveActivity model = new HDInsightHiveActivity().withName("ygnepj") - .withDescription("xqdrphiyxjq") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("achpfzsfutaapbrw") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("jizdmhepfjdiwz").withValue("datawmumuc"), - new UserProperty().withName("vqwhscvaq").withValue("dataxgelnjgftqkgavgo"), - new UserProperty().withName("llxpaylkrast").withValue("datakskkziebm"))) - 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("bfgullqpcijyx") - .withParameters(mapOf("ksror", "datacg", "hltlftr", "dataejfhar"))) - .withPolicy(new ActivityPolicy().withTimeout("dataxzdujpuhbaog") - .withRetry("databkxdhavegysqsm") - .withRetryIntervalInSeconds(1182405642) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("dhjmpn") - .withParameters(mapOf("vytfuqzstqbx", "dataxshhljtku")), - new LinkedServiceReference().withReferenceName("yf") - .withParameters(mapOf("lscokafaqqipv", "datajeitkfhzvscndb")), - new LinkedServiceReference().withReferenceName("vdzssssncghgidq") - .withParameters(mapOf("mwjn", "dataxodbxzha", "hnzkmjoybyogwjr", "dataootcyyupaqdoo", "hxawohsj", - "datasnryk", "yzvrixcveserltlh", "datawxphnlw")))) - .withArguments(Arrays.asList("datajuopvkrmspksfxd", "datab", "datafyxweiqvhfyvkx", "dataoxsveiucuxwn")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withScriptPath("datargqmbnfvyg") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("dcf") - .withParameters(mapOf("vcksznngguucpyt", "datapsy", "lbcc", "dataxnujwffthbziieo", "au", "datac"))) - .withDefines(mapOf("gcdx", "dataevjr", "bvvuqwljmzpyuk", "datagsteeksbksvvyvo")) - .withVariables(mapOf("bkceb", "datavhcgtctnqdcg", "nqqiqc", "datartputmtjsklkw", "n", "datamfxldqtm", "p", - "dataejnemrfqjhc")) - .withQueryTimeout(635732002); - model = BinaryData.fromObject(model).toObject(HDInsightHiveActivity.class); - Assertions.assertEquals("ygnepj", model.name()); - Assertions.assertEquals("xqdrphiyxjq", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("achpfzsfutaapbrw", model.dependsOn().get(0).activity()); - 
Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("jizdmhepfjdiwz", model.userProperties().get(0).name()); - Assertions.assertEquals("bfgullqpcijyx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1182405642, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("dhjmpn", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dcf", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(635732002, model.queryTimeout()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java deleted file mode 100644 index 47287caa5bb1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightHiveActivityTypePropertiesTests.java +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HDInsightHiveActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightHiveActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightHiveActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"lkdoukqscmds\",\"parameters\":{\"lgu\":\"dataws\",\"pqykicesqpvmoxil\":\"datapeqlhhmbyfacexp\",\"iubemxmuygmr\":\"datapkqiqs\",\"fmophtkyzsgayn\":\"datanrbngc\"}},{\"referenceName\":\"mowvcnvfgqxq\",\"parameters\":{\"slroqxrvycj\":\"datauapd\",\"mggy\":\"datani\"}},{\"referenceName\":\"mpm\",\"parameters\":{\"oyxdig\":\"databamtoqsea\",\"lefiott\":\"dataggzmylqhqeosxdsx\"}},{\"referenceName\":\"awgkaohhtttyhy\",\"parameters\":{\"kkx\":\"datazjjjfcyskp\",\"bxsmfvltboc\":\"datai\",\"rlgkoqbzrclar\":\"datahvtpmvppvgrigje\",\"amshqvku\":\"datatfmfkuvybemo\"}}],\"arguments\":[\"datavzqhv\",\"datajdsnv\",\"datadbeanigozjrcx\"],\"getDebugInfo\":\"Always\",\"scriptPath\":\"datajalmzpfylqevwwvz\",\"scriptLinkedService\":{\"referenceName\":\"dxcizropzgjle\",\"parameters\":{\"egoupdqeflvd\":\"datafbhqkvbinstqwn\",\"atwbbfjd\":\"dataaqcqlexobeekzyeb\",\"cahhfuydg\":\"datalhnwoh\",\"lcvibp\":\"datahitavgayusp\"}},\"defines\":{\"bdefepwkhruzzw\":\"datae\",\"vnlhsxeasxsqqu\":\"databbozivfoy\",\"dukp\":\"datavscbpkmo\",\"vmo\":\"dataqyibwu\"},\"variables\":{\"azuboig\":\"datairfk\",\"bbjzdv\":\"datarw\",\"zpvjwego\":\"dataqoilgkzn\"},\"queryTimeout\":660954053}") - .toObject(HDInsightHiveActivityTypeProperties.class); - 
Assertions.assertEquals("lkdoukqscmds", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dxcizropzgjle", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(660954053, model.queryTimeout()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightHiveActivityTypeProperties model = new HDInsightHiveActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("lkdoukqscmds") - .withParameters(mapOf("lgu", "dataws", "pqykicesqpvmoxil", "datapeqlhhmbyfacexp", "iubemxmuygmr", - "datapkqiqs", "fmophtkyzsgayn", "datanrbngc")), - new LinkedServiceReference().withReferenceName("mowvcnvfgqxq") - .withParameters(mapOf("slroqxrvycj", "datauapd", "mggy", "datani")), - new LinkedServiceReference().withReferenceName("mpm") - .withParameters(mapOf("oyxdig", "databamtoqsea", "lefiott", "dataggzmylqhqeosxdsx")), - new LinkedServiceReference().withReferenceName("awgkaohhtttyhy") - .withParameters(mapOf("kkx", "datazjjjfcyskp", "bxsmfvltboc", "datai", "rlgkoqbzrclar", - "datahvtpmvppvgrigje", "amshqvku", "datatfmfkuvybemo")))) - .withArguments(Arrays.asList("datavzqhv", "datajdsnv", "datadbeanigozjrcx")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withScriptPath("datajalmzpfylqevwwvz") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("dxcizropzgjle") - .withParameters(mapOf("egoupdqeflvd", "datafbhqkvbinstqwn", "atwbbfjd", "dataaqcqlexobeekzyeb", - "cahhfuydg", "datalhnwoh", "lcvibp", "datahitavgayusp"))) - .withDefines(mapOf("bdefepwkhruzzw", "datae", "vnlhsxeasxsqqu", "databbozivfoy", "dukp", "datavscbpkmo", - "vmo", "dataqyibwu")) - .withVariables(mapOf("azuboig", "datairfk", "bbjzdv", "datarw", "zpvjwego", "dataqoilgkzn")) - .withQueryTimeout(660954053); - model = 
BinaryData.fromObject(model).toObject(HDInsightHiveActivityTypeProperties.class); - Assertions.assertEquals("lkdoukqscmds", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("dxcizropzgjle", model.scriptLinkedService().referenceName()); - Assertions.assertEquals(660954053, model.queryTimeout()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java deleted file mode 100644 index ce28fa246b24..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTests.java +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.HDInsightMapReduceActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightMapReduceActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightMapReduceActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightMapReduce\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"oaafksormfhr\",\"parameters\":{\"ob\":\"dataxmnrdf\",\"aupjmjig\":\"datarvon\",\"xtoxlxojijtt\":\"datapbdfrtasau\",\"fs\":\"datayrxyn\"}}],\"arguments\":[\"dataci\",\"datashcpxftyh\"],\"getDebugInfo\":\"None\",\"className\":\"datauqsdurfqaawryc\",\"jarFilePath\":\"datazs\",\"jarLinkedService\":{\"referenceName\":\"rzt\",\"parameters\":{\"cighl\":\"dataysxxajr\",\"wcaxj\":\"dataddvnobesowbtnfq\"}},\"jarLibs\":[\"dataehoucmeuu\",\"dataajiotlcxofqjninr\",\"datakqekqtiuveaz\"],\"defines\":{\"lspih\":\"datawbiwygwpwquxiqg\",\"fizef\":\"dataxgvviotvoolk\",\"fwaehs\":\"databrndaquxvuf\"}},\"linkedServiceName\":{\"referenceName\":\"s\",\"parameters\":{\"eyeblkgupgnstq\":\"datavpys\"}},\"policy\":{\"timeout\":\"datafm\",\"retry\":\"dataoartvkhufktqgtjf\",\"retryIntervalInSeconds\":1205700344,\"secureInput\":false,\"secureOutput\":false,\"\":{\"kfzt\":\"datak\",\"kegyskmh\":\"datavonbtnnwa\"}},\"name\":\"wsbznjngerw\",\"description\":\"lpsswoslqmftk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"fbdbzbaboeegale\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"btnhiaqegj\":\"datayzilocn\",\"oqzzyrovvna\":\"datahyvnqbhclbbk\"}},{\"activity\":\"xmjmhclhcqcjn\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Completed\"],\"\":{\"jebbacscirztsapx\":\"datasidasovlrjggvyd\",\"whsfhecpstfe\":\"databk\",\"qmlgsghcnybhvzl\":\"databsl\"}}],\"userProperties\":[{\"name\":\"wjaepjm\",\"value\":\"dataruzogsszoqj\"},{\"name\":\"nfaxcd\",\"value\":\"datamqe\"},{\"name\":\"hsirotj\",\"value\":\"dataltugobscpt\"}],\"\":{\"fozn\":\"dataqyuvhlpmjpzgjnq\",\"oumpks\":\"datad\"}}") - .toObject(HDInsightMapReduceActivity.class); - Assertions.assertEquals("wsbznjngerw", model.name()); - Assertions.assertEquals("lpsswoslqmftk", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("fbdbzbaboeegale", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("wjaepjm", model.userProperties().get(0).name()); - Assertions.assertEquals("s", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1205700344, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals("oaafksormfhr", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("rzt", model.jarLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightMapReduceActivity model = new HDInsightMapReduceActivity().withName("wsbznjngerw") - .withDescription("lpsswoslqmftk") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("fbdbzbaboeegale") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xmjmhclhcqcjn") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("wjaepjm").withValue("dataruzogsszoqj"), - new UserProperty().withName("nfaxcd").withValue("datamqe"), - new UserProperty().withName("hsirotj").withValue("dataltugobscpt"))) - .withLinkedServiceName( - new 
LinkedServiceReference().withReferenceName("s").withParameters(mapOf("eyeblkgupgnstq", "datavpys"))) - .withPolicy(new ActivityPolicy().withTimeout("datafm") - .withRetry("dataoartvkhufktqgtjf") - .withRetryIntervalInSeconds(1205700344) - .withSecureInput(false) - .withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("oaafksormfhr") - .withParameters(mapOf("ob", "dataxmnrdf", "aupjmjig", "datarvon", "xtoxlxojijtt", "datapbdfrtasau", - "fs", "datayrxyn")))) - .withArguments(Arrays.asList("dataci", "datashcpxftyh")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.NONE) - .withClassName("datauqsdurfqaawryc") - .withJarFilePath("datazs") - .withJarLinkedService(new LinkedServiceReference().withReferenceName("rzt") - .withParameters(mapOf("cighl", "dataysxxajr", "wcaxj", "dataddvnobesowbtnfq"))) - .withJarLibs(Arrays.asList("dataehoucmeuu", "dataajiotlcxofqjninr", "datakqekqtiuveaz")) - .withDefines( - mapOf("lspih", "datawbiwygwpwquxiqg", "fizef", "dataxgvviotvoolk", "fwaehs", "databrndaquxvuf")); - model = BinaryData.fromObject(model).toObject(HDInsightMapReduceActivity.class); - Assertions.assertEquals("wsbznjngerw", model.name()); - Assertions.assertEquals("lpsswoslqmftk", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("fbdbzbaboeegale", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("wjaepjm", model.userProperties().get(0).name()); - Assertions.assertEquals("s", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1205700344, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, 
model.policy().secureOutput()); - Assertions.assertEquals("oaafksormfhr", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("rzt", model.jarLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java deleted file mode 100644 index c8bf44a44efe..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightMapReduceActivityTypePropertiesTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HDInsightMapReduceActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightMapReduceActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightMapReduceActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"jpfsmdgrhd\",\"parameters\":{\"bkwvrrptbls\":\"dataehhqxyj\",\"ryfxwwqbeyv\":\"datatakz\"}}],\"arguments\":[\"datajmjieskuimvziy\"],\"getDebugInfo\":\"Always\",\"className\":\"datanxcimalvzxu\",\"jarFilePath\":\"datanpaesraire\",\"jarLinkedService\":{\"referenceName\":\"fbisljhg\",\"parameters\":{\"cbrmmweeuyohjhpx\":\"datamfrmqev\"}},\"jarLibs\":[\"dataiurmlirnadqeq\",\"dataxzcxvpogrtkd\"],\"defines\":{\"xfowfnsyyeytrwyo\":\"datawokefdeeppycwsy\",\"eemjazq\":\"datahmgv\"}}") - .toObject(HDInsightMapReduceActivityTypeProperties.class); - Assertions.assertEquals("jpfsmdgrhd", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("fbisljhg", model.jarLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightMapReduceActivityTypeProperties model = new HDInsightMapReduceActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("jpfsmdgrhd") - .withParameters(mapOf("bkwvrrptbls", "dataehhqxyj", "ryfxwwqbeyv", "datatakz")))) - .withArguments(Arrays.asList("datajmjieskuimvziy")) - 
.withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withClassName("datanxcimalvzxu") - .withJarFilePath("datanpaesraire") - .withJarLinkedService(new LinkedServiceReference().withReferenceName("fbisljhg") - .withParameters(mapOf("cbrmmweeuyohjhpx", "datamfrmqev"))) - .withJarLibs(Arrays.asList("dataiurmlirnadqeq", "dataxzcxvpogrtkd")) - .withDefines(mapOf("xfowfnsyyeytrwyo", "datawokefdeeppycwsy", "eemjazq", "datahmgv")); - model = BinaryData.fromObject(model).toObject(HDInsightMapReduceActivityTypeProperties.class); - Assertions.assertEquals("jpfsmdgrhd", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("fbisljhg", model.jarLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java deleted file mode 100644 index d0a50df52846..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTests.java +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.HDInsightPigActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightPigActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightPigActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightPig\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"yrajdvvsaatyyper\",\"parameters\":{\"tvotfhhayfxkf\":\"datacchvwwchaz\",\"cmfm\":\"dataxxefzliguwqos\",\"gjcn\":\"datanlj\"}},{\"referenceName\":\"a\",\"parameters\":{\"nrvjbnhbxvvu\":\"datativsknewpnp\",\"kj\":\"dataq\"}},{\"referenceName\":\"o\",\"parameters\":{\"cqdthmlqamdlcu\":\"dataqns\"}}],\"arguments\":\"datamrvryakc\",\"getDebugInfo\":\"Failure\",\"scriptPath\":\"datap\",\"scriptLinkedService\":{\"referenceName\":\"ar\",\"parameters\":{\"bwobovexsnmwwhbm\":\"dataabbxexacgmtpk\",\"nkmkcimksfejzm\":\"datajlsztpygqwkdlx\",\"nb\":\"datavlbzmngxzp\",\"kjfkaoe\":\"dataovhddvtnbtvl\"}},\"defines\":{\"ufi\":\"datayizdglzz\",\"ykng\":\"datawvyxy\",\"hwrnc\":\"datatjgpyvjgsjyjnhwb\",\"pb\":\"datawzuerrvpamfpini\"}},\"linkedServiceName\":{\"referenceName\":\"mfbruuhylqgeovn\",\"parameters\":{\"rqu\":\"datafsol\",\"jshicvrmwbgpc\":\"datalnhxr\",\"bxppvpgsrfshkjg\":\"datal\",\"rooogijiqwxwpub\":\"datapboaevtxi\"}},\"policy\":{\"timeout\":\"datanp\",\"retry\":\"datavruhdjzivlaxii\",\"retryIntervalInSeconds\":479943524,\"secureInput\":false,\"secureOutput\":true,\"\":{\"n\":\"datazixmksxxbdtjv\",\"kzulmqxficinw\":\"datanv\",\"x\":\"datajve\",\"t\":\"dataxerxttobosjxbny\"}},\"name\":\"nruditumyycvty\",\"description\":\"lyimhspj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"qvwhjgtbhre\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\"],\"\":{\"ougcwzgd\":\"datapbtqibq\",\"tp\":\"datadrdxoutkgezuln\",\"oljbpoeo\":\"dataarejxjhl\"}}],\"userProperties\":[{\"name\":\"khhavwhrivvzrccy\",\"value\":\"datarx\"}],\"\":{\"deearbbxane\":\"datapw\",\"cppqcgbpcen\":\"dataiqkjupvidzh\",\"hd\":\"datar\",\"btlharjbakp\":\"dataxup\"}}") - .toObject(HDInsightPigActivity.class); - Assertions.assertEquals("nruditumyycvty", model.name()); - Assertions.assertEquals("lyimhspj", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, 
model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qvwhjgtbhre", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("khhavwhrivvzrccy", model.userProperties().get(0).name()); - Assertions.assertEquals("mfbruuhylqgeovn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(479943524, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("yrajdvvsaatyyper", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - Assertions.assertEquals("ar", model.scriptLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightPigActivity model = new HDInsightPigActivity().withName("nruditumyycvty") - .withDescription("lyimhspj") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("qvwhjgtbhre") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("khhavwhrivvzrccy").withValue("datarx"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mfbruuhylqgeovn") - .withParameters(mapOf("rqu", "datafsol", "jshicvrmwbgpc", "datalnhxr", "bxppvpgsrfshkjg", "datal", - "rooogijiqwxwpub", "datapboaevtxi"))) - .withPolicy(new ActivityPolicy().withTimeout("datanp") - .withRetry("datavruhdjzivlaxii") - .withRetryIntervalInSeconds(479943524) - .withSecureInput(false) - .withSecureOutput(true) - 
.withAdditionalProperties(mapOf())) - .withStorageLinkedServices( - Arrays - .asList( - new LinkedServiceReference().withReferenceName("yrajdvvsaatyyper") - .withParameters(mapOf("tvotfhhayfxkf", "datacchvwwchaz", "cmfm", "dataxxefzliguwqos", - "gjcn", "datanlj")), - new LinkedServiceReference() - .withReferenceName("a") - .withParameters(mapOf("nrvjbnhbxvvu", "datativsknewpnp", "kj", "dataq")), - new LinkedServiceReference().withReferenceName("o") - .withParameters(mapOf("cqdthmlqamdlcu", "dataqns")))) - .withArguments("datamrvryakc") - .withGetDebugInfo(HDInsightActivityDebugInfoOption.FAILURE) - .withScriptPath("datap") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("ar") - .withParameters(mapOf("bwobovexsnmwwhbm", "dataabbxexacgmtpk", "nkmkcimksfejzm", "datajlsztpygqwkdlx", - "nb", "datavlbzmngxzp", "kjfkaoe", "dataovhddvtnbtvl"))) - .withDefines(mapOf("ufi", "datayizdglzz", "ykng", "datawvyxy", "hwrnc", "datatjgpyvjgsjyjnhwb", "pb", - "datawzuerrvpamfpini")); - model = BinaryData.fromObject(model).toObject(HDInsightPigActivity.class); - Assertions.assertEquals("nruditumyycvty", model.name()); - Assertions.assertEquals("lyimhspj", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("qvwhjgtbhre", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("khhavwhrivvzrccy", model.userProperties().get(0).name()); - Assertions.assertEquals("mfbruuhylqgeovn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(479943524, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("yrajdvvsaatyyper", 
model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.FAILURE, model.getDebugInfo()); - Assertions.assertEquals("ar", model.scriptLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java deleted file mode 100644 index d4141601a31f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightPigActivityTypePropertiesTests.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HDInsightPigActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightPigActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightPigActivityTypeProperties model = BinaryData.fromString( - "{\"storageLinkedServices\":[{\"referenceName\":\"ugcngdulvd\",\"parameters\":{\"snljduwkbozlmrh\":\"dataguvaimkoyrp\",\"tevvowiyp\":\"dataghvlvdjxbjqiab\",\"cfta\":\"dataljzkxkdfyv\",\"neykxewemtazmrej\":\"dataydcrjlh\"}}],\"arguments\":\"dataxoqege\",\"getDebugInfo\":\"Always\",\"scriptPath\":\"dataf\",\"scriptLinkedService\":{\"referenceName\":\"glmyrkrtdk\",\"parameters\":{\"itlmcaehjhwkl\":\"datazwgrs\"}},\"defines\":{\"nwe\":\"dataqqgyp\",\"ytk\":\"databngojnaks\"}}") - .toObject(HDInsightPigActivityTypeProperties.class); - Assertions.assertEquals("ugcngdulvd", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("glmyrkrtdk", model.scriptLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightPigActivityTypeProperties model = new HDInsightPigActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList(new LinkedServiceReference().withReferenceName("ugcngdulvd") - .withParameters(mapOf("snljduwkbozlmrh", "dataguvaimkoyrp", "tevvowiyp", "dataghvlvdjxbjqiab", "cfta", - "dataljzkxkdfyv", "neykxewemtazmrej", "dataydcrjlh")))) - .withArguments("dataxoqege") - 
.withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withScriptPath("dataf") - .withScriptLinkedService(new LinkedServiceReference().withReferenceName("glmyrkrtdk") - .withParameters(mapOf("itlmcaehjhwkl", "datazwgrs"))) - .withDefines(mapOf("nwe", "dataqqgyp", "ytk", "databngojnaks")); - model = BinaryData.fromObject(model).toObject(HDInsightPigActivityTypeProperties.class); - Assertions.assertEquals("ugcngdulvd", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("glmyrkrtdk", model.scriptLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java deleted file mode 100644 index ee67117f18ff..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTests.java +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.HDInsightSparkActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightSparkActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightSparkActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightSpark\",\"typeProperties\":{\"rootPath\":\"dataitfaekpxvetdr\",\"entryFilePath\":\"datamt\",\"arguments\":[\"datawt\",\"dataknylxr\"],\"getDebugInfo\":\"Always\",\"sparkJobLinkedService\":{\"referenceName\":\"skwujhskxxekzysa\",\"parameters\":{\"icsfaqy\":\"dataimcaxgtwpzqtim\"}},\"className\":\"cpdtktfpjkxk\",\"proxyUser\":\"datawntnfoqwufor\",\"sparkConfig\":{\"yrpipslc\":\"dataeamipnsyed\"}},\"linkedServiceName\":{\"referenceName\":\"grz\",\"parameters\":{\"hncxoqxtjzdpll\":\"databodifghdgs\",\"soxxoqyik\":\"datallvkorg\"}},\"policy\":{\"timeout\":\"datao\",\"retry\":\"datattxqxvmybq\",\"retryIntervalInSeconds\":1309677510,\"secureInput\":false,\"secureOutput\":false,\"\":{\"mdplhzjiqi\":\"datac\"}},\"name\":\"miw\",\"description\":\"mzkxrqzgshqx\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"sla\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Completed\",\"Failed\"],\"\":{\"nmdaivezqz\":\"dataalhlp\",\"rlrcc\":\"datazdipnhbs\",\"xtl\":\"datamnoasyyadyf\",\"ekuovwiwtykpr\":\"datanzcmdgsv\"}},{\"activity\":\"ddbenfjhfszmxpos\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\",\"Completed\"],\"\":{\"j\":\"datadkpdleeslj\",\"ni\":\"datasubxggknmv\",\"cpsjeazzdjcsbk\":\"dataoqy\",\"qdxpnzpuknf\":\"dataaluchbfrtajwsdd\"}},{\"activity\":\"ggitxsyufexivh\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"fccqjenzloxaz\":\"datatkqofr\",\"vqaeyjozbdwflx\":\"datawij\",\"dletjiudcoktsgc\":\"datawliitaieledmiup\",\"grebecxuuzeuklu\":\"datapjlmsta\"}}],\"userProperties\":[{\"name\":\"ejamychwwrvvtj\",\"value\":\"datakttxvmbedvvmr\"},{\"name\":\"nmgabfz\",\"value\":\"dataai\"},{\"name\":\"hylzwzhlbp\",\"value\":\"dataplethekn\"}],\"\":{\"nsvjctytyt\":\"datamtvooaacefx\",\"ywi\":\"datarvtuxv\",\"kfqznvahpxdg\":\"datammmgbynvoytdt\"}}") - .toObject(HDInsightSparkActivity.class); - Assertions.assertEquals("miw", model.name()); - Assertions.assertEquals("mzkxrqzgshqx", 
model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("sla", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ejamychwwrvvtj", model.userProperties().get(0).name()); - Assertions.assertEquals("grz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1309677510, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("skwujhskxxekzysa", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("cpdtktfpjkxk", model.className()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightSparkActivity model = new HDInsightSparkActivity().withName("miw") - .withDescription("mzkxrqzgshqx") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("sla") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ddbenfjhfszmxpos") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ggitxsyufexivh") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - 
.withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ejamychwwrvvtj").withValue("datakttxvmbedvvmr"), - new UserProperty().withName("nmgabfz").withValue("dataai"), - new UserProperty().withName("hylzwzhlbp").withValue("dataplethekn"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("grz") - .withParameters(mapOf("hncxoqxtjzdpll", "databodifghdgs", "soxxoqyik", "datallvkorg"))) - .withPolicy(new ActivityPolicy().withTimeout("datao") - .withRetry("datattxqxvmybq") - .withRetryIntervalInSeconds(1309677510) - .withSecureInput(false) - .withSecureOutput(false) - .withAdditionalProperties(mapOf())) - .withRootPath("dataitfaekpxvetdr") - .withEntryFilePath("datamt") - .withArguments(Arrays.asList("datawt", "dataknylxr")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withSparkJobLinkedService(new LinkedServiceReference().withReferenceName("skwujhskxxekzysa") - .withParameters(mapOf("icsfaqy", "dataimcaxgtwpzqtim"))) - .withClassName("cpdtktfpjkxk") - .withProxyUser("datawntnfoqwufor") - .withSparkConfig(mapOf("yrpipslc", "dataeamipnsyed")); - model = BinaryData.fromObject(model).toObject(HDInsightSparkActivity.class); - Assertions.assertEquals("miw", model.name()); - Assertions.assertEquals("mzkxrqzgshqx", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("sla", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ejamychwwrvvtj", model.userProperties().get(0).name()); - Assertions.assertEquals("grz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1309677510, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - 
Assertions.assertEquals(false, model.policy().secureOutput()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("skwujhskxxekzysa", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("cpdtktfpjkxk", model.className()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java deleted file mode 100644 index 45e3379535cd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightSparkActivityTypePropertiesTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HDInsightSparkActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightSparkActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightSparkActivityTypeProperties model = BinaryData.fromString( - "{\"rootPath\":\"datahowxcptx\",\"entryFilePath\":\"dataxfwwvmygcfaztoi\",\"arguments\":[\"datajri\",\"datacamgjyt\",\"datakttit\"],\"getDebugInfo\":\"None\",\"sparkJobLinkedService\":{\"referenceName\":\"xpmoadjooernzl\",\"parameters\":{\"awptxqxpuf\":\"dataygoutqebpuoy\",\"j\":\"dataxp\",\"cecukzt\":\"dataajvskpbu\",\"wwfgjjca\":\"datau\"}},\"className\":\"cepp\",\"proxyUser\":\"datailyxpqxnlifhjym\",\"sparkConfig\":{\"jphozymcypdbuoqn\":\"dataliivyatyzwybgay\",\"gidgwscosmhgza\":\"datatlz\",\"yavfc\":\"datacgdk\"}}") - .toObject(HDInsightSparkActivityTypeProperties.class); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("xpmoadjooernzl", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("cepp", model.className()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightSparkActivityTypeProperties model - = new HDInsightSparkActivityTypeProperties().withRootPath("datahowxcptx") - .withEntryFilePath("dataxfwwvmygcfaztoi") - .withArguments(Arrays.asList("datajri", "datacamgjyt", "datakttit")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.NONE) - .withSparkJobLinkedService(new LinkedServiceReference().withReferenceName("xpmoadjooernzl") - 
.withParameters(mapOf("awptxqxpuf", "dataygoutqebpuoy", "j", "dataxp", "cecukzt", "dataajvskpbu", - "wwfgjjca", "datau"))) - .withClassName("cepp") - .withProxyUser("datailyxpqxnlifhjym") - .withSparkConfig( - mapOf("jphozymcypdbuoqn", "dataliivyatyzwybgay", "gidgwscosmhgza", "datatlz", "yavfc", "datacgdk")); - model = BinaryData.fromObject(model).toObject(HDInsightSparkActivityTypeProperties.class); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.NONE, model.getDebugInfo()); - Assertions.assertEquals("xpmoadjooernzl", model.sparkJobLinkedService().referenceName()); - Assertions.assertEquals("cepp", model.className()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java deleted file mode 100644 index cb78963ed527..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTests.java +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.HDInsightStreamingActivity; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightStreamingActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightStreamingActivity model = BinaryData.fromString( - 
"{\"type\":\"HDInsightStreaming\",\"typeProperties\":{\"storageLinkedServices\":[{\"referenceName\":\"gkxtgs\",\"parameters\":{\"puds\":\"datanholkoyxm\",\"xs\":\"datawvzunrqvup\",\"moftilhoye\":\"datanqzdfjwofgzif\",\"dircdtkvorif\":\"datahwaepg\"}},{\"referenceName\":\"qmfvzubmhsxtryy\",\"parameters\":{\"vturdh\":\"datammu\",\"zwmpt\":\"datanmhr\"}},{\"referenceName\":\"ygqztnbvueiz\",\"parameters\":{\"tbyiyeig\":\"datagqxekbvwxyumqoqw\",\"nhqxzewlwwdmpvpc\":\"datagrzvegxmx\"}}],\"arguments\":[\"dataovzkwhdtf\",\"datavfctsfujdapc\",\"datagamgbnktg\"],\"getDebugInfo\":\"Always\",\"mapper\":\"datadydb\",\"reducer\":\"datatexkwcolnaer\",\"input\":\"datasdyvah\",\"output\":\"dataidpc\",\"filePaths\":[\"datavnzhdsaqmekgt\",\"dataojrruhzvve\",\"datarwl\"],\"fileLinkedService\":{\"referenceName\":\"snlmdosiyzf\",\"parameters\":{\"aekcpum\":\"datazkggbmzdnyrmol\",\"sakoucssc\":\"datakc\"}},\"combiner\":\"dataqilzogilgr\",\"commandEnvironment\":[\"dataypwhfybflrpvcgq\"],\"defines\":{\"fjkxxn\":\"dataksghpsqvuisedeqr\",\"vwxvlsvicvpa\":\"datarqdxvbt\",\"emlozjyovrllv\":\"datawohkromzs\",\"ez\":\"databgkgjp\"}},\"linkedServiceName\":{\"referenceName\":\"flgps\",\"parameters\":{\"wzpfbiqjrz\":\"datanan\",\"jzknkffzdyozn\":\"datarxizorqliblybx\",\"dedlmfw\":\"datadsto\"}},\"policy\":{\"timeout\":\"datagfwebiwxm\",\"retry\":\"dataxidazslwhuyikh\",\"retryIntervalInSeconds\":1550086880,\"secureInput\":false,\"secureOutput\":true,\"\":{\"qrdvqvaloauuwoi\":\"datakpfperheiplzmsw\",\"myomavbotaoaix\":\"dataofumbpmzedmfjgk\",\"infy\":\"datapcw\"}},\"name\":\"tqvjn\",\"description\":\"mqwut\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"wraow\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\",\"Failed\"],\"\":{\"dhwoyznjddshazl\":\"datajdmwcxvcronr\",\"otniqzqmpgvy\":\"datamvxvip\",\"fftszsw\":\"datadjwwbrhjhcw\",\"ghjgvhgyeoikxjpu\":\"datay\"}},{\"activity\":\"ggpsaqfn\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\
",\"Failed\"],\"\":{\"legtsqz\":\"datau\"}},{\"activity\":\"zworuhh\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"gpnxu\":\"datahfxb\"}}],\"userProperties\":[{\"name\":\"gpznbklhwutyuvu\",\"value\":\"dataq\"},{\"name\":\"rfziubeflvktjbmc\",\"value\":\"dataplnukdawgzhbwh\"},{\"name\":\"vonuhv\",\"value\":\"datagxck\"}],\"\":{\"ocothsgxjc\":\"datao\"}}") - .toObject(HDInsightStreamingActivity.class); - Assertions.assertEquals("tqvjn", model.name()); - Assertions.assertEquals("mqwut", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("wraow", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gpznbklhwutyuvu", model.userProperties().get(0).name()); - Assertions.assertEquals("flgps", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1550086880, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("gkxtgs", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("snlmdosiyzf", model.fileLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightStreamingActivity model = new HDInsightStreamingActivity().withName("tqvjn") - .withDescription("mqwut") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("wraow") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, 
DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ggpsaqfn") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zworuhh") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gpznbklhwutyuvu").withValue("dataq"), - new UserProperty().withName("rfziubeflvktjbmc").withValue("dataplnukdawgzhbwh"), - new UserProperty().withName("vonuhv").withValue("datagxck"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("flgps") - .withParameters( - mapOf("wzpfbiqjrz", "datanan", "jzknkffzdyozn", "datarxizorqliblybx", "dedlmfw", "datadsto"))) - .withPolicy(new ActivityPolicy().withTimeout("datagfwebiwxm") - .withRetry("dataxidazslwhuyikh") - .withRetryIntervalInSeconds(1550086880) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("gkxtgs") - .withParameters(mapOf("puds", "datanholkoyxm", "xs", "datawvzunrqvup", "moftilhoye", - "datanqzdfjwofgzif", "dircdtkvorif", "datahwaepg")), - new LinkedServiceReference().withReferenceName("qmfvzubmhsxtryy") - .withParameters(mapOf("vturdh", "datammu", "zwmpt", "datanmhr")), - new LinkedServiceReference().withReferenceName("ygqztnbvueiz") - .withParameters(mapOf("tbyiyeig", "datagqxekbvwxyumqoqw", "nhqxzewlwwdmpvpc", "datagrzvegxmx")))) - .withArguments(Arrays.asList("dataovzkwhdtf", "datavfctsfujdapc", "datagamgbnktg")) - 
.withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withMapper("datadydb") - .withReducer("datatexkwcolnaer") - .withInput("datasdyvah") - .withOutput("dataidpc") - .withFilePaths(Arrays.asList("datavnzhdsaqmekgt", "dataojrruhzvve", "datarwl")) - .withFileLinkedService(new LinkedServiceReference().withReferenceName("snlmdosiyzf") - .withParameters(mapOf("aekcpum", "datazkggbmzdnyrmol", "sakoucssc", "datakc"))) - .withCombiner("dataqilzogilgr") - .withCommandEnvironment(Arrays.asList("dataypwhfybflrpvcgq")) - .withDefines(mapOf("fjkxxn", "dataksghpsqvuisedeqr", "vwxvlsvicvpa", "datarqdxvbt", "emlozjyovrllv", - "datawohkromzs", "ez", "databgkgjp")); - model = BinaryData.fromObject(model).toObject(HDInsightStreamingActivity.class); - Assertions.assertEquals("tqvjn", model.name()); - Assertions.assertEquals("mqwut", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("wraow", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gpznbklhwutyuvu", model.userProperties().get(0).name()); - Assertions.assertEquals("flgps", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1550086880, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("gkxtgs", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("snlmdosiyzf", model.fileLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java deleted file mode 100644 index cc2424fe99e6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HDInsightStreamingActivityTypePropertiesTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HDInsightStreamingActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.HDInsightActivityDebugInfoOption; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HDInsightStreamingActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HDInsightStreamingActivityTypeProperties model = BinaryData.fromString( - 
"{\"storageLinkedServices\":[{\"referenceName\":\"ajxeb\",\"parameters\":{\"hwdicntqsrhacjsb\":\"datayrctfaabkukraqnd\",\"uyffkayovljtrml\":\"datadhjdwfnbiyxqr\",\"wwbqukjithx\":\"datarqllugnxmbwdkz\",\"tbfmtbprt\":\"datapvpkvceiwcfshhc\"}},{\"referenceName\":\"vuxwuepjcugwku\",\"parameters\":{\"ieyqpu\":\"datawgmznvlwcnjhq\",\"txzcj\":\"datawzzx\",\"eoofoxfchunewkss\":\"databsqcwnbxqk\"}},{\"referenceName\":\"pnhhlhprjcfy\",\"parameters\":{\"xlonz\":\"datagxi\"}},{\"referenceName\":\"fdfs\",\"parameters\":{\"dhqkariatxhpxdvr\":\"datawdnghdnrt\"}}],\"arguments\":[\"dataghgxgzbzsasgamc\"],\"getDebugInfo\":\"Always\",\"mapper\":\"dataxnsg\",\"reducer\":\"datawadvuqba\",\"input\":\"datae\",\"output\":\"datatffqalmcqtm\",\"filePaths\":[\"datawwtcwbgmxwpynsl\"],\"fileLinkedService\":{\"referenceName\":\"yvmizx\",\"parameters\":{\"tdgpmhzlla\":\"datahqwwtar\",\"pk\":\"dataozsdnf\",\"lnjdaxio\":\"dataksuycyfubgn\",\"fdsztmqquycokpf\":\"dataulvpnqvcutwn\"}},\"combiner\":\"datajfwvmsfnsyxt\",\"commandEnvironment\":[\"dataqqzhqgmoexgny\",\"datagsa\"],\"defines\":{\"h\":\"dataeicexd\",\"kcstynjxfndxrofw\":\"datap\",\"dktio\":\"datatjhdbid\"}}") - .toObject(HDInsightStreamingActivityTypeProperties.class); - Assertions.assertEquals("ajxeb", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("yvmizx", model.fileLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HDInsightStreamingActivityTypeProperties model = new HDInsightStreamingActivityTypeProperties() - .withStorageLinkedServices(Arrays.asList( - new LinkedServiceReference().withReferenceName("ajxeb") - .withParameters(mapOf("hwdicntqsrhacjsb", "datayrctfaabkukraqnd", "uyffkayovljtrml", - "datadhjdwfnbiyxqr", "wwbqukjithx", "datarqllugnxmbwdkz", "tbfmtbprt", "datapvpkvceiwcfshhc")), - new 
LinkedServiceReference().withReferenceName("vuxwuepjcugwku") - .withParameters(mapOf("ieyqpu", "datawgmznvlwcnjhq", "txzcj", "datawzzx", "eoofoxfchunewkss", - "databsqcwnbxqk")), - new LinkedServiceReference().withReferenceName("pnhhlhprjcfy") - .withParameters(mapOf("xlonz", "datagxi")), - new LinkedServiceReference().withReferenceName("fdfs") - .withParameters(mapOf("dhqkariatxhpxdvr", "datawdnghdnrt")))) - .withArguments(Arrays.asList("dataghgxgzbzsasgamc")) - .withGetDebugInfo(HDInsightActivityDebugInfoOption.ALWAYS) - .withMapper("dataxnsg") - .withReducer("datawadvuqba") - .withInput("datae") - .withOutput("datatffqalmcqtm") - .withFilePaths(Arrays.asList("datawwtcwbgmxwpynsl")) - .withFileLinkedService(new LinkedServiceReference().withReferenceName("yvmizx") - .withParameters(mapOf("tdgpmhzlla", "datahqwwtar", "pk", "dataozsdnf", "lnjdaxio", "dataksuycyfubgn", - "fdsztmqquycokpf", "dataulvpnqvcutwn"))) - .withCombiner("datajfwvmsfnsyxt") - .withCommandEnvironment(Arrays.asList("dataqqzhqgmoexgny", "datagsa")) - .withDefines(mapOf("h", "dataeicexd", "kcstynjxfndxrofw", "datap", "dktio", "datatjhdbid")); - model = BinaryData.fromObject(model).toObject(HDInsightStreamingActivityTypeProperties.class); - Assertions.assertEquals("ajxeb", model.storageLinkedServices().get(0).referenceName()); - Assertions.assertEquals(HDInsightActivityDebugInfoOption.ALWAYS, model.getDebugInfo()); - Assertions.assertEquals("yvmizx", model.fileLinkedService().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java deleted file mode 100644 index 550ac81b706d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsLocationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HdfsLocation; - -public final class HdfsLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HdfsLocation model = BinaryData.fromString( - "{\"type\":\"HdfsLocation\",\"folderPath\":\"datahjhausy\",\"fileName\":\"dataekymffztsilscvqs\",\"\":{\"fymkouih\":\"datai\",\"zhogsmgbvmtdw\":\"dataeseuugci\",\"jnfveg\":\"dataqbe\"}}") - .toObject(HdfsLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HdfsLocation model = new HdfsLocation().withFolderPath("datahjhausy").withFileName("dataekymffztsilscvqs"); - model = BinaryData.fromObject(model).toObject(HdfsLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java 
deleted file mode 100644 index c2f66e8d5dea..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsReadSettingsTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DistcpSettings; -import com.azure.resourcemanager.datafactory.models.HdfsReadSettings; - -public final class HdfsReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HdfsReadSettings model = BinaryData.fromString( - "{\"type\":\"HdfsReadSettings\",\"recursive\":\"dataawl\",\"wildcardFolderPath\":\"datawvrovjvjjuypcnb\",\"wildcardFileName\":\"databejmujz\",\"fileListPath\":\"datapspaxhfuwygssssj\",\"enablePartitionDiscovery\":\"datafep\",\"partitionRootPath\":\"datamssdvjvdcc\",\"modifiedDatetimeStart\":\"datazrdjfmmwkakyxy\",\"modifiedDatetimeEnd\":\"datadelr\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"datamffvbhtueynx\",\"tempScriptPath\":\"dataxqnwce\",\"distcpOptions\":\"datasbncadfa\"},\"deleteFilesAfterCompletion\":\"dataebj\",\"maxConcurrentConnections\":\"datawwgverbywuuvei\",\"disableMetricsCollection\":\"databpzdwhxput\",\"\":{\"rw\":\"datal\"}}") - .toObject(HdfsReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HdfsReadSettings model = new HdfsReadSettings().withMaxConcurrentConnections("datawwgverbywuuvei") - .withDisableMetricsCollection("databpzdwhxput") - .withRecursive("dataawl") - .withWildcardFolderPath("datawvrovjvjjuypcnb") - .withWildcardFileName("databejmujz") - .withFileListPath("datapspaxhfuwygssssj") - .withEnablePartitionDiscovery("datafep") - .withPartitionRootPath("datamssdvjvdcc") - 
.withModifiedDatetimeStart("datazrdjfmmwkakyxy") - .withModifiedDatetimeEnd("datadelr") - .withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("datamffvbhtueynx") - .withTempScriptPath("dataxqnwce") - .withDistcpOptions("datasbncadfa")) - .withDeleteFilesAfterCompletion("dataebj"); - model = BinaryData.fromObject(model).toObject(HdfsReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java deleted file mode 100644 index d353f5c70b1c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HdfsSourceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DistcpSettings; -import com.azure.resourcemanager.datafactory.models.HdfsSource; - -public final class HdfsSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HdfsSource model = BinaryData.fromString( - "{\"type\":\"HdfsSource\",\"recursive\":\"databtkcvola\",\"distcpSettings\":{\"resourceManagerEndpoint\":\"dataukgov\",\"tempScriptPath\":\"datau\",\"distcpOptions\":\"datadcqoxyxiyhmj\"},\"sourceRetryCount\":\"datanw\",\"sourceRetryWait\":\"datazgvaeqiygbo\",\"maxConcurrentConnections\":\"datazjodidgud\",\"disableMetricsCollection\":\"dataclajbenfyuuf\",\"\":{\"il\":\"dataikffczwaew\",\"pbwfna\":\"datauhsghdov\"}}") - .toObject(HdfsSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HdfsSource model = new HdfsSource().withSourceRetryCount("datanw") - .withSourceRetryWait("datazgvaeqiygbo") - .withMaxConcurrentConnections("datazjodidgud") - .withDisableMetricsCollection("dataclajbenfyuuf") - .withRecursive("databtkcvola") - .withDistcpSettings(new DistcpSettings().withResourceManagerEndpoint("dataukgov") - .withTempScriptPath("datau") - .withDistcpOptions("datadcqoxyxiyhmj")); - model = BinaryData.fromObject(model).toObject(HdfsSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java deleted file mode 100644 index 6ce80e12c7df..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft 
Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HiveDatasetTypeProperties; - -public final class HiveDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HiveDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datawzhxzuxerxhywl\",\"table\":\"dataqsqvvdkfpfj\",\"schema\":\"datajd\"}") - .toObject(HiveDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HiveDatasetTypeProperties model = new HiveDatasetTypeProperties().withTableName("datawzhxzuxerxhywl") - .withTable("dataqsqvvdkfpfj") - .withSchema("datajd"); - model = BinaryData.fromObject(model).toObject(HiveDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java deleted file mode 100644 index 8a40a6e2a34d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveObjectDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.HiveObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HiveObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HiveObjectDataset model = BinaryData.fromString( - "{\"type\":\"HiveObject\",\"typeProperties\":{\"tableName\":\"datak\",\"table\":\"datarqkekcdaviiebeqr\",\"schema\":\"dataavqymcwt\"},\"description\":\"uceplbrzgkuorwp\",\"structure\":\"datastwe\",\"schema\":\"dataptscru\",\"linkedServiceName\":{\"referenceName\":\"kkiela\",\"parameters\":{\"yoifgdfzjqthyk\":\"dataoyjyflsmsbn\",\"kxxlwwo\":\"datavoevcwfzo\",\"ubdmg\":\"dataxgbsdzcgcvypj\"}},\"parameters\":{\"oibm\":{\"type\":\"String\",\"defaultValue\":\"dataujcqgzwvxwiu\"},\"qoqovqhgphgx\":{\"type\":\"Array\",\"defaultValue\":\"dataqrljdcukylaxrj\"},\"cy\":{\"type\":\"Bool\",\"defaultValue\":\"datad\"}},\"annotations\":[\"datavumryd\"],\"folder\":{\"name\":\"ivahfcqwnjzebpic\"},\"\":{\"qeigxuyxsxteuik\":\"dataoypoedkspwwibpy\",\"fqderkr\":\"dataznfffnhcgnaqsrm\",\"znbbyzposzfut\":\"datasdcobpmgqlwy\",\"tzpvqew\":\"datapbygbnbcmoiq\"}}") - .toObject(HiveObjectDataset.class); - Assertions.assertEquals("uceplbrzgkuorwp", model.description()); - Assertions.assertEquals("kkiela", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oibm").type()); - Assertions.assertEquals("ivahfcqwnjzebpic", model.folder().name()); - } - - @org.junit.jupiter.api.Test - 
public void testSerialize() throws Exception { - HiveObjectDataset model = new HiveObjectDataset().withDescription("uceplbrzgkuorwp") - .withStructure("datastwe") - .withSchema("dataptscru") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kkiela") - .withParameters(mapOf("yoifgdfzjqthyk", "dataoyjyflsmsbn", "kxxlwwo", "datavoevcwfzo", "ubdmg", - "dataxgbsdzcgcvypj"))) - .withParameters(mapOf("oibm", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataujcqgzwvxwiu"), - "qoqovqhgphgx", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataqrljdcukylaxrj"), "cy", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datad"))) - .withAnnotations(Arrays.asList("datavumryd")) - .withFolder(new DatasetFolder().withName("ivahfcqwnjzebpic")) - .withTableName("datak") - .withTable("datarqkekcdaviiebeqr") - .withSchemaTypePropertiesSchema("dataavqymcwt"); - model = BinaryData.fromObject(model).toObject(HiveObjectDataset.class); - Assertions.assertEquals("uceplbrzgkuorwp", model.description()); - Assertions.assertEquals("kkiela", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("oibm").type()); - Assertions.assertEquals("ivahfcqwnjzebpic", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java deleted file mode 100644 index 82850031644a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HiveSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HiveSource; - -public final class HiveSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HiveSource model = BinaryData.fromString( - "{\"type\":\"HiveSource\",\"query\":\"datam\",\"queryTimeout\":\"dataklzomdfcp\",\"additionalColumns\":\"dataimijzhrbs\",\"sourceRetryCount\":\"datavublouel\",\"sourceRetryWait\":\"dataqfbgeblp\",\"maxConcurrentConnections\":\"datackmnpzubz\",\"disableMetricsCollection\":\"dataswgfjrg\",\"\":{\"nklfs\":\"datapbshqzzlcfemnry\",\"ygczab\":\"datazsyigxsyx\",\"pcdia\":\"datapeuqyzfdsu\"}}") - .toObject(HiveSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HiveSource model = new HiveSource().withSourceRetryCount("datavublouel") - .withSourceRetryWait("dataqfbgeblp") - .withMaxConcurrentConnections("datackmnpzubz") - .withDisableMetricsCollection("dataswgfjrg") - .withQueryTimeout("dataklzomdfcp") - .withAdditionalColumns("dataimijzhrbs") - 
.withQuery("datam"); - model = BinaryData.fromObject(model).toObject(HiveSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java deleted file mode 100644 index 5c9a26ed237d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTests.java +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import com.azure.resourcemanager.datafactory.models.HttpDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HttpDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HttpDataset model = BinaryData.fromString( - 
"{\"type\":\"HttpFile\",\"typeProperties\":{\"relativeUrl\":\"dataqnl\",\"requestMethod\":\"dataxc\",\"requestBody\":\"datanitodmrahj\",\"additionalHeaders\":\"datadodnvltcvmah\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datakupbbnhiclhyzhr\",\"deserializer\":\"datadfwbifnnhlsf\",\"\":{\"fijtezgxmp\":\"dataimtfcqmmynbrpel\",\"ivczktllxswtdap\":\"dataszamadlerzinf\",\"xcevdspth\":\"datamirmnrijefmrt\"}},\"compression\":{\"type\":\"datafmwtblgm\",\"level\":\"datakqoikxiefwln\",\"\":{\"cotelikjiyteh\":\"dataffcnuestbsliejdn\",\"gncrdorcty\":\"dataxtzxqdwbymuql\",\"mmwckozvlf\":\"dataecpekhxdbyhqtzcv\"}}},\"description\":\"trtsvx\",\"structure\":\"dataqtzckjbcbkgnrf\",\"schema\":\"dataschjxncqzah\",\"linkedServiceName\":{\"referenceName\":\"tvbgdobi\",\"parameters\":{\"sgihtrxueqbmxqf\":\"datazolx\"}},\"parameters\":{\"shesgcsqose\":{\"type\":\"Object\",\"defaultValue\":\"dataqs\"},\"spfyvslazip\":{\"type\":\"Float\",\"defaultValue\":\"datangoufpizpbmfx\"},\"i\":{\"type\":\"Object\",\"defaultValue\":\"datagtdumjty\"}},\"annotations\":[\"dataznlaxozqthk\",\"dataxfugfzizyxd\"],\"folder\":{\"name\":\"nqzbrq\"},\"\":{\"amnsbqoitwhmucj\":\"datamfckviyju\"}}") - .toObject(HttpDataset.class); - Assertions.assertEquals("trtsvx", model.description()); - Assertions.assertEquals("tvbgdobi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("shesgcsqose").type()); - Assertions.assertEquals("nqzbrq", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HttpDataset model = new HttpDataset().withDescription("trtsvx") - .withStructure("dataqtzckjbcbkgnrf") - .withSchema("dataschjxncqzah") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tvbgdobi") - .withParameters(mapOf("sgihtrxueqbmxqf", "datazolx"))) - .withParameters(mapOf("shesgcsqose", - new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataqs"), "spfyvslazip", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datangoufpizpbmfx"), "i", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datagtdumjty"))) - .withAnnotations(Arrays.asList("dataznlaxozqthk", "dataxfugfzizyxd")) - .withFolder(new DatasetFolder().withName("nqzbrq")) - .withRelativeUrl("dataqnl") - .withRequestMethod("dataxc") - .withRequestBody("datanitodmrahj") - .withAdditionalHeaders("datadodnvltcvmah") - .withFormat(new DatasetStorageFormat().withSerializer("datakupbbnhiclhyzhr") - .withDeserializer("datadfwbifnnhlsf") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("datafmwtblgm") - .withLevel("datakqoikxiefwln") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(HttpDataset.class); - Assertions.assertEquals("trtsvx", model.description()); - Assertions.assertEquals("tvbgdobi", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("shesgcsqose").type()); - Assertions.assertEquals("nqzbrq", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java deleted file mode 100644 index 2e3532152036..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpDatasetTypePropertiesTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.HttpDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetStorageFormat; -import java.util.HashMap; -import java.util.Map; - -public final class HttpDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HttpDatasetTypeProperties model = BinaryData.fromString( - "{\"relativeUrl\":\"datahcxyvehy\",\"requestMethod\":\"dataelyqdvpqfbxg\",\"requestBody\":\"dataudusdmtxqlefnohe\",\"additionalHeaders\":\"datavfopkyl\",\"format\":{\"type\":\"DatasetStorageFormat\",\"serializer\":\"datayenjpwdmsfw\",\"deserializer\":\"datarsvevcneqswxhqh\",\"\":{\"yhtxgdmvghcm\":\"datatbzvulqevvjncp\",\"efb\":\"dataxmlwk\",\"fyim\":\"datayjrtalqeebu\"}},\"compression\":{\"type\":\"dataaezktomsgoihlqw\",\"level\":\"datawaazbaeeek\",\"\":{\"wnoljdkx\":\"datavnabfbbt\"}}}") - 
.toObject(HttpDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HttpDatasetTypeProperties model = new HttpDatasetTypeProperties().withRelativeUrl("datahcxyvehy") - .withRequestMethod("dataelyqdvpqfbxg") - .withRequestBody("dataudusdmtxqlefnohe") - .withAdditionalHeaders("datavfopkyl") - .withFormat(new DatasetStorageFormat().withSerializer("datayenjpwdmsfw") - .withDeserializer("datarsvevcneqswxhqh") - .withAdditionalProperties(mapOf("type", "DatasetStorageFormat"))) - .withCompression(new DatasetCompression().withType("dataaezktomsgoihlqw") - .withLevel("datawaazbaeeek") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(HttpDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java deleted file mode 100644 index 5085a25a083e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpReadSettingsTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HttpReadSettings; - -public final class HttpReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HttpReadSettings model = BinaryData.fromString( - "{\"type\":\"HttpReadSettings\",\"requestMethod\":\"dataoehyirsvredo\",\"requestBody\":\"datavqpblqubfpeom\",\"additionalHeaders\":\"dataynheamzlqvaj\",\"requestTimeout\":\"datas\",\"additionalColumns\":\"datatsythuioixpfg\",\"maxConcurrentConnections\":\"datawwojw\",\"disableMetricsCollection\":\"dataqcjrmnver\",\"\":{\"onmo\":\"dataehuweuitqe\"}}") - .toObject(HttpReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HttpReadSettings model = new HttpReadSettings().withMaxConcurrentConnections("datawwojw") - .withDisableMetricsCollection("dataqcjrmnver") - .withRequestMethod("dataoehyirsvredo") - .withRequestBody("datavqpblqubfpeom") - .withAdditionalHeaders("dataynheamzlqvaj") - .withRequestTimeout("datas") - .withAdditionalColumns("datatsythuioixpfg"); - model = BinaryData.fromObject(model).toObject(HttpReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java deleted file mode 100644 index fd34353e5739..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpServerLocationTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HttpServerLocation; - -public final class HttpServerLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HttpServerLocation model = BinaryData.fromString( - "{\"type\":\"HttpServerLocation\",\"relativeUrl\":\"dataocxm\",\"folderPath\":\"datashksny\",\"fileName\":\"dataspamwbwmbnls\",\"\":{\"d\":\"datafi\",\"pfliwo\":\"datatwtkvih\",\"p\":\"datanguuzhwvla\"}}") - .toObject(HttpServerLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HttpServerLocation model = new HttpServerLocation().withFolderPath("datashksny") - .withFileName("dataspamwbwmbnls") - .withRelativeUrl("dataocxm"); - model = BinaryData.fromObject(model).toObject(HttpServerLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java deleted file mode 100644 index a028983b1642..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HttpSourceTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HttpSource; - -public final class HttpSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HttpSource model = BinaryData.fromString( - "{\"type\":\"HttpSource\",\"httpRequestTimeout\":\"datazcxlgmu\",\"sourceRetryCount\":\"datawkk\",\"sourceRetryWait\":\"datahmdtjczpfoispch\",\"maxConcurrentConnections\":\"datavmvsbgyq\",\"disableMetricsCollection\":\"dataazmcuggts\",\"\":{\"bobqqnwh\":\"datazyepkrncjrqhug\",\"bzixqxx\":\"datamvdowlqcyhf\"}}") - .toObject(HttpSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HttpSource model = new HttpSource().withSourceRetryCount("datawkk") - .withSourceRetryWait("datahmdtjczpfoispch") - .withMaxConcurrentConnections("datavmvsbgyq") - .withDisableMetricsCollection("dataazmcuggts") - .withHttpRequestTimeout("datazcxlgmu"); - model = BinaryData.fromObject(model).toObject(HttpSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java deleted file mode 100644 index b4eb4c9e1bc1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotObjectDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.HubspotObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class HubspotObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HubspotObjectDataset model = BinaryData.fromString( - "{\"type\":\"HubspotObject\",\"typeProperties\":{\"tableName\":\"datamxxyfrdjid\"},\"description\":\"tfvgwfwsldigwo\",\"structure\":\"datapvydd\",\"schema\":\"datavclrsn\",\"linkedServiceName\":{\"referenceName\":\"frppwwqclmdmt\",\"parameters\":{\"iqmcjb\":\"dataepzpxzxlcqzfxai\"}},\"parameters\":{\"dmhk\":{\"type\":\"Array\",\"defaultValue\":\"datapqnip\"}},\"annotations\":[\"dataeobwkeuzltenlbf\",\"datalmxoz\",\"datasn\",\"dataoduomtxjbrixym\"],\"folder\":{\"name\":\"kk\"},\"\":{\"mrzcqfevnkyakc\":\"dataurhwishy\",\"de\":\"datatehognsddjk\",\"zogfcnxcxg\":\"dataesu\",\"ifjc\":\"dataumtcqxmyvkxixypa\"}}") - .toObject(HubspotObjectDataset.class); - Assertions.assertEquals("tfvgwfwsldigwo", model.description()); - Assertions.assertEquals("frppwwqclmdmt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("dmhk").type()); - Assertions.assertEquals("kk", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HubspotObjectDataset model = new HubspotObjectDataset().withDescription("tfvgwfwsldigwo") - .withStructure("datapvydd") - .withSchema("datavclrsn") - .withLinkedServiceName(new 
LinkedServiceReference().withReferenceName("frppwwqclmdmt") - .withParameters(mapOf("iqmcjb", "dataepzpxzxlcqzfxai"))) - .withParameters( - mapOf("dmhk", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapqnip"))) - .withAnnotations(Arrays.asList("dataeobwkeuzltenlbf", "datalmxoz", "datasn", "dataoduomtxjbrixym")) - .withFolder(new DatasetFolder().withName("kk")) - .withTableName("datamxxyfrdjid"); - model = BinaryData.fromObject(model).toObject(HubspotObjectDataset.class); - Assertions.assertEquals("tfvgwfwsldigwo", model.description()); - Assertions.assertEquals("frppwwqclmdmt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("dmhk").type()); - Assertions.assertEquals("kk", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java deleted file mode 100644 index 9533f510b5cf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/HubspotSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.HubspotSource; - -public final class HubspotSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - HubspotSource model = BinaryData.fromString( - "{\"type\":\"HubspotSource\",\"query\":\"dataspcutgkmrsqaq\",\"queryTimeout\":\"datalnhgisiwzzk\",\"additionalColumns\":\"dataqrngl\",\"sourceRetryCount\":\"datatu\",\"sourceRetryWait\":\"datafwdkpadktsyy\",\"maxConcurrentConnections\":\"dataojrfqtfk\",\"disableMetricsCollection\":\"dataupmdajqpdvvzb\",\"\":{\"mvtqhn\":\"dataxokiffqpwdyzset\",\"cprkqywyb\":\"dataoij\"}}") - .toObject(HubspotSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - HubspotSource model = new HubspotSource().withSourceRetryCount("datatu") - .withSourceRetryWait("datafwdkpadktsyy") - .withMaxConcurrentConnections("dataojrfqtfk") - .withDisableMetricsCollection("dataupmdajqpdvvzb") - .withQueryTimeout("datalnhgisiwzzk") - .withAdditionalColumns("dataqrngl") - .withQuery("dataspcutgkmrsqaq"); - model = BinaryData.fromObject(model).toObject(HubspotSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java deleted file mode 100644 index f6b3f4b59256..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTests.java +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.IfConditionActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IfConditionActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IfConditionActivity model = BinaryData.fromString( - "{\"type\":\"IfCondition\",\"typeProperties\":{\"expression\":{\"value\":\"llnmddflck\"},\"ifTrueActivities\":[{\"type\":\"Activity\",\"name\":\"jpxpwxabvxwoa\",\"description\":\"eillszdgyxozkm\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"nbdvbuxljiqyrwdm\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"jzoyzy\":\"datawgvwmybokqpfhswb\"}},{\"activity\":\"ly\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Skipped\",\"Completed\"],\"\":{\"kgmgqynejqk\":\"datapkisefygdaume\",\"xexbksaf\":\"datasxiczvfxoihc\"}},{\"activity\":\"ecwyrtluujyespcg\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"iwiaqrc\":\"dataieyyww\",\"w\":\"datafybktbviaqvzzszc\",\"vygdefpy\":\"datarxo\",\"grdsmravxtgl\":\"datatwwaxx\"}},{\"activity\":\"xmd\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\",\"Skipped\"],\"\":{\"ypcwbyrkx\":\"datamnqcgbi\",\"chegeog\":\"dataebvxu\",\"wzzeumadl\":\"datakcrc\"}}],\"userProperties\":[{\"name\":
\"rewhuqkic\",\"value\":\"datamyykmk\"},{\"name\":\"lbq\",\"value\":\"datanrmgefxkattpkkw\"},{\"name\":\"dvksigxak\",\"value\":\"dataoptb\"}],\"\":{\"bxk\":\"dataqobpnkvnuwjrx\",\"tglo\":\"dataveqbx\",\"jhyiey\":\"datafmlbhlimgzimtzz\",\"xalvdhmumsmnub\":\"datarwfu\"}},{\"type\":\"Activity\",\"name\":\"nxrpsty\",\"description\":\"idqnvhrbfepfwrin\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"s\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"xhlgoexudnbfoor\":\"dataxthsf\",\"xvriplgkwoo\":\"datatxdlewh\",\"vzzoo\":\"datam\",\"fh\":\"datawfo\"}}],\"userProperties\":[{\"name\":\"azljajzqggwarbv\",\"value\":\"datalatvb\"}],\"\":{\"odgisfejs\":\"datay\",\"wi\":\"datap\"}}],\"ifFalseActivities\":[{\"type\":\"Activity\",\"name\":\"ynfjwktiyhiy\",\"description\":\"tvaodif\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"twopsjrqhgnrxxh\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Failed\"],\"\":{\"dmvshgbjukas\":\"datauxyg\",\"akutdthloaf\":\"datagv\"}},{\"activity\":\"hiykatjse\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"ptefdvjgbemrjb\":\"datadl\",\"pdprjethyhbnoye\":\"datavqu\",\"iqoiblaumog\":\"datauivdrzxobtekl\"}}],\"userProperties\":[{\"name\":\"wm\",\"value\":\"datarj\"},{\"name\":\"tpgkybdktyvr\",\"value\":\"datamrqbeqzhnpx\"}],\"\":{\"q\":\"datacnnyga\",\"dn\":\"datapfrgouwef\"}},{\"type\":\"Activity\",\"name\":\"gathvlwhr\",\"description\":\"kdyqxjpzykk\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ybh\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"he\":\"datasdg\"}}],\"userProperties\":[{\"name\":\"rbojzfsznephb\",\"value\":\"dataurthmbgavw\"},{\"name\":\"qjetoaijayvu\",\"value\":\"datami\"}],\"\":{\"deqwxivjhmldvnox\":\"dataqjcxp\"}},{\"type\":\"Activity\",\"name\":\"bhltxtpgqqi\",\"description\":\"ktayafg\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"kkccixsgk\",\"de
pendencyConditions\":[\"Skipped\",\"Completed\",\"Completed\"],\"\":{\"nlukeqzcbqv\":\"datazokuncqqhbjmvbe\",\"hmrughmrybbh\":\"dataejnwwqyyfctfs\"}},{\"activity\":\"tnuzorx\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"slcfxsgjd\":\"dataqpdfsautvit\",\"awf\":\"dataqemcghorr\",\"wgircfnzpybrflq\":\"datazbbvrmvhtm\",\"hpsugebgboq\":\"datatvlqwpmmmhupvx\"}},{\"activity\":\"ciii\",\"dependencyConditions\":[\"Succeeded\",\"Failed\"],\"\":{\"a\":\"datagfqgefxypxmkexjo\",\"wduwn\":\"datajdaxezfdsoglji\",\"hk\":\"dataaifwogqwdxtpmfa\",\"nigjoxhzcmgmcsj\":\"dataznnkmms\"}}],\"userProperties\":[{\"name\":\"utqloto\",\"value\":\"datafvbayqwj\"}],\"\":{\"huinjymnq\":\"dataoehhhkxlquupb\"}}]},\"name\":\"eptejryvvuktc\",\"description\":\"tp\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"kcqpyxjjutvovhu\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"qmbvssjbyne\":\"datavqyck\"}},{\"activity\":\"snncnnqi\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Succeeded\"],\"\":{\"mplt\":\"datau\"}}],\"userProperties\":[{\"name\":\"fndafrzi\",\"value\":\"datajcyxzan\"},{\"name\":\"aveyvdrulhworhze\",\"value\":\"dataqdvmxufrqpaw\"}],\"\":{\"wu\":\"datadohz\",\"hftlsfwpvflm\":\"datalae\",\"txbrj\":\"datajdu\",\"dmnymfvxfssh\":\"datapeypuq\"}}") - .toObject(IfConditionActivity.class); - Assertions.assertEquals("eptejryvvuktc", model.name()); - Assertions.assertEquals("tp", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("kcqpyxjjutvovhu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("fndafrzi", model.userProperties().get(0).name()); - Assertions.assertEquals("llnmddflck", model.expression().value()); - Assertions.assertEquals("jpxpwxabvxwoa", 
model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("eillszdgyxozkm", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("nbdvbuxljiqyrwdm", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("rewhuqkic", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("ynfjwktiyhiy", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("tvaodif", model.ifFalseActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("twopsjrqhgnrxxh", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("wm", model.ifFalseActivities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IfConditionActivity model = new IfConditionActivity().withName("eptejryvvuktc") - .withDescription("tp") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("kcqpyxjjutvovhu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("snncnnqi") - 
.withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("fndafrzi").withValue("datajcyxzan"), - new UserProperty().withName("aveyvdrulhworhze").withValue("dataqdvmxufrqpaw"))) - .withExpression(new Expression().withValue("llnmddflck")) - .withIfTrueActivities(Arrays.asList( - new Activity().withName("jpxpwxabvxwoa") - .withDescription("eillszdgyxozkm") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("nbdvbuxljiqyrwdm") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ly") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ecwyrtluujyespcg") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xmd") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("rewhuqkic").withValue("datamyykmk"), - new UserProperty().withName("lbq").withValue("datanrmgefxkattpkkw"), - new UserProperty().withName("dvksigxak").withValue("dataoptb"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("nxrpsty") - .withDescription("idqnvhrbfepfwrin") - 
.withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("s") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("azljajzqggwarbv").withValue("datalatvb"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withIfFalseActivities(Arrays.asList( - new Activity().withName("ynfjwktiyhiy") - .withDescription("tvaodif") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("twopsjrqhgnrxxh") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hiykatjse") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("wm").withValue("datarj"), - new UserProperty().withName("tpgkybdktyvr").withValue("datamrqbeqzhnpx"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("gathvlwhr") - .withDescription("kdyqxjpzykk") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ybh") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("rbojzfsznephb").withValue("dataurthmbgavw"), - new UserProperty().withName("qjetoaijayvu").withValue("datami"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("bhltxtpgqqi") - .withDescription("ktayafg") - 
.withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("kkccixsgk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("tnuzorx") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ciii") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("utqloto").withValue("datafvbayqwj"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(IfConditionActivity.class); - Assertions.assertEquals("eptejryvvuktc", model.name()); - Assertions.assertEquals("tp", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("kcqpyxjjutvovhu", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("fndafrzi", model.userProperties().get(0).name()); - Assertions.assertEquals("llnmddflck", model.expression().value()); - Assertions.assertEquals("jpxpwxabvxwoa", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("eillszdgyxozkm", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("nbdvbuxljiqyrwdm", 
model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("rewhuqkic", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("ynfjwktiyhiy", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("tvaodif", model.ifFalseActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("twopsjrqhgnrxxh", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("wm", model.ifFalseActivities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java deleted file mode 100644 index 0f0d55f9eb6b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IfConditionActivityTypePropertiesTests.java +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IfConditionActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IfConditionActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IfConditionActivityTypeProperties model = BinaryData.fromString( - 
"{\"expression\":{\"value\":\"cx\"},\"ifTrueActivities\":[{\"type\":\"Activity\",\"name\":\"r\",\"description\":\"prdgmmgtqgzdfjf\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"umyuiquzfotf\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"zfpfxbqdrjunigxn\":\"datafacflkbgohxbj\",\"nvr\":\"datannghgazdbv\"}},{\"activity\":\"ti\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Completed\",\"Succeeded\"],\"\":{\"jepydjdpapndmvrb\":\"datahlxvn\",\"wkthmexidecdeh\":\"datatvvtapw\",\"xfhtsgyyrg\":\"datakmfiudnpj\",\"gqllgokznffqvtx\":\"dataguv\"}}],\"userProperties\":[{\"name\":\"ihhqancwgrwgd\",\"value\":\"datafzdy\"},{\"name\":\"tkrsn\",\"value\":\"datadfamyolvgkslaiu\"}],\"\":{\"tqcxoamxumwzduhi\":\"datarswvwzu\"}},{\"type\":\"Activity\",\"name\":\"omxvbruzxsnz\",\"description\":\"pgf\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"eozgnwmcizclnqe\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"ondr\":\"datapwdfjsj\",\"pqwrsfdpikx\":\"datancfoqdspslc\",\"aegrppwoligfljt\":\"datag\"}}],\"userProperties\":[{\"name\":\"cwzdwvyjzok\",\"value\":\"datayci\"},{\"name\":\"mywjcf\",\"value\":\"datamfoztwmvprn\"}],\"\":{\"xbiwnqewqt\":\"datasex\",\"galsparbjsvq\":\"dataztogihpylfdrye\",\"lvnosblc\":\"databvgemkze\"}}],\"ifFalseActivities\":[{\"type\":\"Activity\",\"name\":\"wac\",\"description\":\"hkpdcv\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ijuntmufhzcc\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Failed\",\"Failed\"],\"\":{\"xymtcwacavz\":\"datarfxrg\",\"xid\":\"datadybhydlq\",\"ihnsaespzwgpjrix\":\"datast\"}},{\"activity\":\"ao\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"gaiypihqmmm\":\"datamfcnh\",\"xnivvuwrvghlzr\":\"dataokdqkfbpf\",\"bfi\":\"datakgpipwtrt\",\"oawzkefzd\":\"databddhfkjsqqqu\"}},{\"activity\":\"yhvaovoqon\",\"dependencyConditions\":[\"Failed\",\"
Skipped\",\"Succeeded\"],\"\":{\"hmldwty\":\"dataqiytr\",\"vbbc\":\"databnmstflkfgz\",\"sbqw\":\"databcfecmcprggchf\"}}],\"userProperties\":[{\"name\":\"gvxhw\",\"value\":\"dataqipfrrvngill\"},{\"name\":\"mfbl\",\"value\":\"datagekouxurlifc\"}],\"\":{\"awqxrenjzlqbtef\":\"dataffoibxjgcuppws\"}}]}") - .toObject(IfConditionActivityTypeProperties.class); - Assertions.assertEquals("cx", model.expression().value()); - Assertions.assertEquals("r", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("prdgmmgtqgzdfjf", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("umyuiquzfotf", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ihhqancwgrwgd", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("wac", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("hkpdcv", model.ifFalseActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ijuntmufhzcc", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gvxhw", model.ifFalseActivities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IfConditionActivityTypeProperties model = new 
IfConditionActivityTypeProperties() - .withExpression(new Expression().withValue("cx")) - .withIfTrueActivities(Arrays.asList( - new Activity().withName("r") - .withDescription("prdgmmgtqgzdfjf") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency() - .withActivity("umyuiquzfotf") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ti") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ihhqancwgrwgd").withValue("datafzdy"), - new UserProperty().withName("tkrsn").withValue("datadfamyolvgkslaiu"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("omxvbruzxsnz") - .withDescription("pgf") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("eozgnwmcizclnqe") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("cwzdwvyjzok").withValue("datayci"), - new UserProperty().withName("mywjcf").withValue("datamfoztwmvprn"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withIfFalseActivities(Arrays.asList(new Activity().withName("wac") - .withDescription("hkpdcv") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ijuntmufhzcc") - 
.withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ao") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yhvaovoqon") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("gvxhw").withValue("dataqipfrrvngill"), - new UserProperty().withName("mfbl").withValue("datagekouxurlifc"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(IfConditionActivityTypeProperties.class); - Assertions.assertEquals("cx", model.expression().value()); - Assertions.assertEquals("r", model.ifTrueActivities().get(0).name()); - Assertions.assertEquals("prdgmmgtqgzdfjf", model.ifTrueActivities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.ifTrueActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.ifTrueActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("umyuiquzfotf", model.ifTrueActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.ifTrueActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ihhqancwgrwgd", model.ifTrueActivities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("wac", model.ifFalseActivities().get(0).name()); - Assertions.assertEquals("hkpdcv", model.ifFalseActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.ifFalseActivities().get(0).state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.ifFalseActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ijuntmufhzcc", model.ifFalseActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.ifFalseActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("gvxhw", model.ifFalseActivities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java deleted file mode 100644 index f53f4f60dc6c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ImpalaDatasetTypeProperties; - -public final class ImpalaDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ImpalaDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"databjsarxsvmfpa\",\"table\":\"databpzgfgqpu\",\"schema\":\"datagxgieabbfp\"}") - .toObject(ImpalaDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ImpalaDatasetTypeProperties model = new ImpalaDatasetTypeProperties().withTableName("databjsarxsvmfpa") - .withTable("databpzgfgqpu") - .withSchema("datagxgieabbfp"); - model = BinaryData.fromObject(model).toObject(ImpalaDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java deleted file mode 100644 index 47917c438cd2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaObjectDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.ImpalaObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ImpalaObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ImpalaObjectDataset model = BinaryData.fromString( - "{\"type\":\"ImpalaObject\",\"typeProperties\":{\"tableName\":\"datahvpsuwichm\",\"table\":\"datazbyfkoc\",\"schema\":\"datazdct\"},\"description\":\"lwscrngtwgxrol\",\"structure\":\"dataplksdksutacucti\",\"schema\":\"datavis\",\"linkedServiceName\":{\"referenceName\":\"bvjh\",\"parameters\":{\"lphngrxlexoweoro\":\"datamqququ\",\"mtgbqpfy\":\"datarqicg\",\"nzhrplc\":\"datavhtvijvwmrg\"}},\"parameters\":{\"q\":{\"type\":\"Float\",\"defaultValue\":\"dataquuu\"},\"cmbkygvxjdqo\":{\"type\":\"Float\",\"defaultValue\":\"datatqyzyc\"}},\"annotations\":[\"datamdzlyb\"],\"folder\":{\"name\":\"fk\"},\"\":{\"sryjokvl\":\"datafkicxhsevmnkggh\"}}") - .toObject(ImpalaObjectDataset.class); - Assertions.assertEquals("lwscrngtwgxrol", model.description()); - Assertions.assertEquals("bvjh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("q").type()); - Assertions.assertEquals("fk", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ImpalaObjectDataset model = new ImpalaObjectDataset().withDescription("lwscrngtwgxrol") - .withStructure("dataplksdksutacucti") - .withSchema("datavis") - 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("bvjh") - .withParameters( - mapOf("lphngrxlexoweoro", "datamqququ", "mtgbqpfy", "datarqicg", "nzhrplc", "datavhtvijvwmrg"))) - .withParameters(mapOf("q", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataquuu"), "cmbkygvxjdqo", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatqyzyc"))) - .withAnnotations(Arrays.asList("datamdzlyb")) - .withFolder(new DatasetFolder().withName("fk")) - .withTableName("datahvpsuwichm") - .withTable("datazbyfkoc") - .withSchemaTypePropertiesSchema("datazdct"); - model = BinaryData.fromObject(model).toObject(ImpalaObjectDataset.class); - Assertions.assertEquals("lwscrngtwgxrol", model.description()); - Assertions.assertEquals("bvjh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("q").type()); - Assertions.assertEquals("fk", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java deleted file mode 100644 index 21664311b56d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImpalaSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ImpalaSource; - -public final class ImpalaSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ImpalaSource model = BinaryData.fromString( - "{\"type\":\"ImpalaSource\",\"query\":\"dataayo\",\"queryTimeout\":\"dataetzcxlisvqfb\",\"additionalColumns\":\"dataizxp\",\"sourceRetryCount\":\"datapsaploex\",\"sourceRetryWait\":\"datamvlocd\",\"maxConcurrentConnections\":\"datahkob\",\"disableMetricsCollection\":\"datahhipn\",\"\":{\"n\":\"datadyriw\"}}") - .toObject(ImpalaSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ImpalaSource model = new ImpalaSource().withSourceRetryCount("datapsaploex") - .withSourceRetryWait("datamvlocd") - .withMaxConcurrentConnections("datahkob") - .withDisableMetricsCollection("datahhipn") - .withQueryTimeout("dataetzcxlisvqfb") - .withAdditionalColumns("dataizxp") - .withQuery("dataayo"); - model = BinaryData.fromObject(model).toObject(ImpalaSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java deleted file mode 100644 index b9bef43f8829..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ImportSettingsTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ImportSettings; -import java.util.HashMap; -import java.util.Map; - -public final class ImportSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ImportSettings model = BinaryData.fromString( - "{\"type\":\"ImportSettings\",\"\":{\"glwacsfbmbtcef\":\"datattptsdeequovan\",\"uslxyt\":\"datakuxgyumoamqxw\",\"nfpdilhzgjjeo\":\"dataobjledjxblob\"}}") - .toObject(ImportSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ImportSettings model = new ImportSettings().withAdditionalProperties(mapOf("type", "ImportSettings")); - model = BinaryData.fromObject(model).toObject(ImportSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java deleted file mode 100644 index 342b96b51c4f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.InformixSink; - -public final class InformixSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - InformixSink model = BinaryData.fromString( - "{\"type\":\"InformixSink\",\"preCopyScript\":\"dataozqqwiawbwzyvbui\",\"writeBatchSize\":\"dataysatoplqc\",\"writeBatchTimeout\":\"datasrlzwuqkprf\",\"sinkRetryCount\":\"datacowtoqfwbsbkob\",\"sinkRetryWait\":\"datassj\",\"maxConcurrentConnections\":\"datahfcxwrjbrxm\",\"disableMetricsCollection\":\"dataetttul\",\"\":{\"mosiskihf\":\"datajbhespf\"}}") - .toObject(InformixSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - InformixSink model = new InformixSink().withWriteBatchSize("dataysatoplqc") - .withWriteBatchTimeout("datasrlzwuqkprf") - .withSinkRetryCount("datacowtoqfwbsbkob") - .withSinkRetryWait("datassj") - .withMaxConcurrentConnections("datahfcxwrjbrxm") - .withDisableMetricsCollection("dataetttul") - .withPreCopyScript("dataozqqwiawbwzyvbui"); - model = BinaryData.fromObject(model).toObject(InformixSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java deleted file mode 100644 index 62e066b5a1ef..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.InformixSource; - -public final class InformixSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - InformixSource model = BinaryData.fromString( - "{\"type\":\"InformixSource\",\"query\":\"datab\",\"queryTimeout\":\"datapaeyw\",\"additionalColumns\":\"datatvyzuyqzjfv\",\"sourceRetryCount\":\"datayyjvzlscyz\",\"sourceRetryWait\":\"datasxmyzss\",\"maxConcurrentConnections\":\"datascqheixaz\",\"disableMetricsCollection\":\"datammjaigaxwq\",\"\":{\"ggtdvhokxxf\":\"datarctsh\"}}") - .toObject(InformixSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - InformixSource model = new InformixSource().withSourceRetryCount("datayyjvzlscyz") - .withSourceRetryWait("datasxmyzss") - .withMaxConcurrentConnections("datascqheixaz") - .withDisableMetricsCollection("datammjaigaxwq") - .withQueryTimeout("datapaeyw") - .withAdditionalColumns("datatvyzuyqzjfv") - .withQuery("datab"); - model = BinaryData.fromObject(model).toObject(InformixSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java deleted file mode 100644 index b48c6ff0af7f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.InformixTableDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class InformixTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - InformixTableDataset model = BinaryData.fromString( - "{\"type\":\"InformixTable\",\"typeProperties\":{\"tableName\":\"databzgottaksadzighm\"},\"description\":\"b\",\"structure\":\"datavucfvvraab\",\"schema\":\"datardeewl\",\"linkedServiceName\":{\"referenceName\":\"uxpcbwkdwj\",\"parameters\":{\"roo\":\"datazni\",\"taspmcrei\":\"datam\",\"hminuwqxungrobgw\":\"datauftrni\"}},\"parameters\":{\"geerclbl\":{\"type\":\"Object\",\"defaultValue\":\"datawdylwxmvzjowz\"},\"yeurjwmv\":{\"type\":\"Float\",\"defaultValue\":\"datapwac\"}},\"annotations\":[\"datavdifkiikg\",\"dataruccwmecbtxsytr\",\"dataexe\",\"datawmrqjyw\"],\"folder\":{\"name\":\"vycfjncindi\"},\"\":{\"riarsbcl\":\"datakajwj\"}}") - .toObject(InformixTableDataset.class); - Assertions.assertEquals("b", model.description()); - Assertions.assertEquals("uxpcbwkdwj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("geerclbl").type()); - Assertions.assertEquals("vycfjncindi", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - InformixTableDataset model = new InformixTableDataset().withDescription("b") - .withStructure("datavucfvvraab") - .withSchema("datardeewl") - 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("uxpcbwkdwj") - .withParameters(mapOf("roo", "datazni", "taspmcrei", "datam", "hminuwqxungrobgw", "datauftrni"))) - .withParameters(mapOf("geerclbl", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawdylwxmvzjowz"), - "yeurjwmv", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapwac"))) - .withAnnotations(Arrays.asList("datavdifkiikg", "dataruccwmecbtxsytr", "dataexe", "datawmrqjyw")) - .withFolder(new DatasetFolder().withName("vycfjncindi")) - .withTableName("databzgottaksadzighm"); - model = BinaryData.fromObject(model).toObject(InformixTableDataset.class); - Assertions.assertEquals("b", model.description()); - Assertions.assertEquals("uxpcbwkdwj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("geerclbl").type()); - Assertions.assertEquals("vycfjncindi", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java deleted file mode 100644 index 3d3eaf21874d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/InformixTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.InformixTableDatasetTypeProperties; - -public final class InformixTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - InformixTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datanhzcknjxizb\"}") - .toObject(InformixTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - InformixTableDatasetTypeProperties model - = new InformixTableDatasetTypeProperties().withTableName("datanhzcknjxizb"); - model = BinaryData.fromObject(model).toObject(InformixTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java deleted file mode 100644 index b5b151854bc7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeComputePropertiesTests.java +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopyComputeScaleProperties; -import com.azure.resourcemanager.datafactory.models.DataFlowComputeType; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeComputeProperties; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowProperties; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeVNetProperties; -import com.azure.resourcemanager.datafactory.models.PipelineExternalComputeScaleProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeComputePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeComputeProperties model = BinaryData.fromString( - 
"{\"location\":\"vgcm\",\"nodeSize\":\"djlwwefevtwllq\",\"numberOfNodes\":2074211054,\"maxParallelExecutionsPerNode\":1209669623,\"dataFlowProperties\":{\"computeType\":\"MemoryOptimized\",\"coreCount\":1497350411,\"timeToLive\":1652665497,\"cleanup\":false,\"customProperties\":[{\"name\":\"hx\",\"value\":\"ckwbqwj\"},{\"name\":\"mmkwahrooy\",\"value\":\"obnvyue\"},{\"name\":\"coerohextigukfk\",\"value\":\"ycb\"}],\"\":{\"tqy\":\"databnpeenl\",\"jdsqephtoshqt\":\"datalfbsavqdv\",\"rcnxaeypyqoi\":\"dataarjwgu\"}},\"vNetProperties\":{\"vNetId\":\"asyddqbwscjwyy\",\"subnet\":\"miflrvfe\",\"publicIPs\":[\"tshwfrhhasabva\",\"znwwu\",\"kbpgci\",\"bwtpwbjlpfwuq\"],\"subnetId\":\"pdgitenyuksl\",\"\":{\"mmxlmxejwy\":\"datapnxghamrplanchqo\",\"psbeqieiux\":\"datajzbjwvtuekbbypqs\"}},\"copyComputeScaleProperties\":{\"dataIntegrationUnit\":105279113,\"timeToLive\":2051591206,\"\":{\"pcpihlzyxvta\":\"datanjjhrgkj\",\"oidneku\":\"datafja\",\"naqve\":\"datadgcpz\",\"xfbagegjtjltcki\":\"datagnpuelrnanbrpkoc\"}},\"pipelineExternalComputeScaleProperties\":{\"timeToLive\":605940544,\"numberOfPipelineNodes\":1567155782,\"numberOfExternalNodes\":2066455052,\"\":{\"invzsod\":\"datamdboefnhxhahuq\",\"sgkq\":\"dataokrqd\",\"pcjxh\":\"datajkot\"}},\"\":{\"p\":\"datalocjhz\",\"xbofpr\":\"databrbm\",\"wwsfvtgh\":\"datamivapesbfzllej\"}}") - .toObject(IntegrationRuntimeComputeProperties.class); - Assertions.assertEquals("vgcm", model.location()); - Assertions.assertEquals("djlwwefevtwllq", model.nodeSize()); - Assertions.assertEquals(2074211054, model.numberOfNodes()); - Assertions.assertEquals(1209669623, model.maxParallelExecutionsPerNode()); - Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.dataFlowProperties().computeType()); - Assertions.assertEquals(1497350411, model.dataFlowProperties().coreCount()); - Assertions.assertEquals(1652665497, model.dataFlowProperties().timeToLive()); - Assertions.assertEquals(false, model.dataFlowProperties().cleanup()); - 
Assertions.assertEquals("hx", model.dataFlowProperties().customProperties().get(0).name()); - Assertions.assertEquals("ckwbqwj", model.dataFlowProperties().customProperties().get(0).value()); - Assertions.assertEquals("asyddqbwscjwyy", model.vNetProperties().vNetId()); - Assertions.assertEquals("miflrvfe", model.vNetProperties().subnet()); - Assertions.assertEquals("tshwfrhhasabva", model.vNetProperties().publicIPs().get(0)); - Assertions.assertEquals("pdgitenyuksl", model.vNetProperties().subnetId()); - Assertions.assertEquals(105279113, model.copyComputeScaleProperties().dataIntegrationUnit()); - Assertions.assertEquals(2051591206, model.copyComputeScaleProperties().timeToLive()); - Assertions.assertEquals(605940544, model.pipelineExternalComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1567155782, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); - Assertions.assertEquals(2066455052, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeComputeProperties model = new IntegrationRuntimeComputeProperties().withLocation("vgcm") - .withNodeSize("djlwwefevtwllq") - .withNumberOfNodes(2074211054) - .withMaxParallelExecutionsPerNode(1209669623) - .withDataFlowProperties(new IntegrationRuntimeDataFlowProperties() - .withComputeType(DataFlowComputeType.MEMORY_OPTIMIZED) - .withCoreCount(1497350411) - .withTimeToLive(1652665497) - .withCleanup(false) - .withCustomProperties(Arrays.asList( - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("hx").withValue("ckwbqwj"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("mmkwahrooy") - .withValue("obnvyue"), - new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("coerohextigukfk") - .withValue("ycb"))) - .withAdditionalProperties(mapOf())) - .withVNetProperties(new 
IntegrationRuntimeVNetProperties().withVNetId("asyddqbwscjwyy") - .withSubnet("miflrvfe") - .withPublicIPs(Arrays.asList("tshwfrhhasabva", "znwwu", "kbpgci", "bwtpwbjlpfwuq")) - .withSubnetId("pdgitenyuksl") - .withAdditionalProperties(mapOf())) - .withCopyComputeScaleProperties(new CopyComputeScaleProperties().withDataIntegrationUnit(105279113) - .withTimeToLive(2051591206) - .withAdditionalProperties(mapOf())) - .withPipelineExternalComputeScaleProperties( - new PipelineExternalComputeScaleProperties().withTimeToLive(605940544) - .withNumberOfPipelineNodes(1567155782) - .withNumberOfExternalNodes(2066455052) - .withAdditionalProperties(mapOf())) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeComputeProperties.class); - Assertions.assertEquals("vgcm", model.location()); - Assertions.assertEquals("djlwwefevtwllq", model.nodeSize()); - Assertions.assertEquals(2074211054, model.numberOfNodes()); - Assertions.assertEquals(1209669623, model.maxParallelExecutionsPerNode()); - Assertions.assertEquals(DataFlowComputeType.MEMORY_OPTIMIZED, model.dataFlowProperties().computeType()); - Assertions.assertEquals(1497350411, model.dataFlowProperties().coreCount()); - Assertions.assertEquals(1652665497, model.dataFlowProperties().timeToLive()); - Assertions.assertEquals(false, model.dataFlowProperties().cleanup()); - Assertions.assertEquals("hx", model.dataFlowProperties().customProperties().get(0).name()); - Assertions.assertEquals("ckwbqwj", model.dataFlowProperties().customProperties().get(0).value()); - Assertions.assertEquals("asyddqbwscjwyy", model.vNetProperties().vNetId()); - Assertions.assertEquals("miflrvfe", model.vNetProperties().subnet()); - Assertions.assertEquals("tshwfrhhasabva", model.vNetProperties().publicIPs().get(0)); - Assertions.assertEquals("pdgitenyuksl", model.vNetProperties().subnetId()); - Assertions.assertEquals(105279113, model.copyComputeScaleProperties().dataIntegrationUnit()); - 
Assertions.assertEquals(2051591206, model.copyComputeScaleProperties().timeToLive()); - Assertions.assertEquals(605940544, model.pipelineExternalComputeScaleProperties().timeToLive()); - Assertions.assertEquals(1567155782, model.pipelineExternalComputeScaleProperties().numberOfPipelineNodes()); - Assertions.assertEquals(2066455052, model.pipelineExternalComputeScaleProperties().numberOfExternalNodes()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java deleted file mode 100644 index 6e27eefc4e6b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeCustomerVirtualNetworkTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeCustomerVirtualNetwork; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeCustomerVirtualNetworkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeCustomerVirtualNetwork model - = BinaryData.fromString("{\"subnetId\":\"yu\"}").toObject(IntegrationRuntimeCustomerVirtualNetwork.class); - Assertions.assertEquals("yu", model.subnetId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeCustomerVirtualNetwork model - = new IntegrationRuntimeCustomerVirtualNetwork().withSubnetId("yu"); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeCustomerVirtualNetwork.class); - Assertions.assertEquals("yu", model.subnetId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java deleted file mode 100644 index ee8e50078536..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeDataFlowPropertiesCustomPropertiesItemTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem model - = BinaryData.fromString("{\"name\":\"iexuwemtg\",\"value\":\"ebymmcgskscbsx\"}") - .toObject(IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.class); - Assertions.assertEquals("iexuwemtg", model.name()); - Assertions.assertEquals("ebymmcgskscbsx", model.value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem model - = new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("iexuwemtg") - .withValue("ebymmcgskscbsx"); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem.class); - Assertions.assertEquals("iexuwemtg", model.name()); - Assertions.assertEquals("ebymmcgskscbsx", model.value()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java deleted file mode 100644 index f84d5c25a7a4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataFlowPropertiesTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowComputeType; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowProperties; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeDataFlowPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeDataFlowProperties model = BinaryData.fromString( - "{\"computeType\":\"ComputeOptimized\",\"coreCount\":1721666597,\"timeToLive\":868667695,\"cleanup\":false,\"customProperties\":[{\"name\":\"ovelvsp\",\"value\":\"xjtezujtoudo\"}],\"\":{\"sr\":\"datawmv\"}}") - .toObject(IntegrationRuntimeDataFlowProperties.class); - Assertions.assertEquals(DataFlowComputeType.COMPUTE_OPTIMIZED, model.computeType()); - Assertions.assertEquals(1721666597, model.coreCount()); - Assertions.assertEquals(868667695, model.timeToLive()); - Assertions.assertEquals(false, model.cleanup()); - Assertions.assertEquals("ovelvsp", model.customProperties().get(0).name()); - Assertions.assertEquals("xjtezujtoudo", model.customProperties().get(0).value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeDataFlowProperties model - = new IntegrationRuntimeDataFlowProperties().withComputeType(DataFlowComputeType.COMPUTE_OPTIMIZED) - .withCoreCount(1721666597) - .withTimeToLive(868667695) - .withCleanup(false) - .withCustomProperties( - Arrays.asList(new IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem().withName("ovelvsp") - .withValue("xjtezujtoudo"))) - .withAdditionalProperties(mapOf()); - model = 
BinaryData.fromObject(model).toObject(IntegrationRuntimeDataFlowProperties.class); - Assertions.assertEquals(DataFlowComputeType.COMPUTE_OPTIMIZED, model.computeType()); - Assertions.assertEquals(1721666597, model.coreCount()); - Assertions.assertEquals(868667695, model.timeToLive()); - Assertions.assertEquals(false, model.cleanup()); - Assertions.assertEquals("ovelvsp", model.customProperties().get(0).name()); - Assertions.assertEquals("xjtezujtoudo", model.customProperties().get(0).value()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java deleted file mode 100644 index 9a159035fd85..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDataProxyPropertiesTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.EntityReference; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDataProxyProperties; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeEntityReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeDataProxyPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeDataProxyProperties model = BinaryData.fromString( - "{\"connectVia\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"zo\"},\"stagingLinkedService\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"gxuupc\"},\"path\":\"gqnaidvss\"}") - .toObject(IntegrationRuntimeDataProxyProperties.class); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.connectVia().type()); - Assertions.assertEquals("zo", model.connectVia().referenceName()); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.stagingLinkedService().type()); - Assertions.assertEquals("gxuupc", model.stagingLinkedService().referenceName()); - Assertions.assertEquals("gqnaidvss", model.path()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeDataProxyProperties model = new IntegrationRuntimeDataProxyProperties() - .withConnectVia( - new EntityReference().withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE) - .withReferenceName("zo")) - .withStagingLinkedService( - new EntityReference().withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE) - .withReferenceName("gxuupc")) - .withPath("gqnaidvss"); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDataProxyProperties.class); - 
Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.connectVia().type()); - Assertions.assertEquals("zo", model.connectVia().referenceName()); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.stagingLinkedService().type()); - Assertions.assertEquals("gxuupc", model.stagingLinkedService().referenceName()); - Assertions.assertEquals("gqnaidvss", model.path()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java deleted file mode 100644 index ed32c14dcc5d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeDebugResourceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeDebugResource; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeDebugResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"xbzlmc\",\"\":{\"onqzinkfkbgbzbow\":\"datapcvhdbevwqqxeys\",\"qkjjeokbz\":\"dataeqocljmygvk\"}},\"name\":\"ezrxcczurtleipqx\"}") - .toObject(IntegrationRuntimeDebugResource.class); - Assertions.assertEquals("ezrxcczurtleipqx", model.name()); - Assertions.assertEquals("xbzlmc", model.properties().description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeDebugResource model = new IntegrationRuntimeDebugResource().withName("ezrxcczurtleipqx") - .withProperties(new IntegrationRuntime().withDescription("xbzlmc") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeDebugResource.class); - Assertions.assertEquals("ezrxcczurtleipqx", model.name()); - Assertions.assertEquals("xbzlmc", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java deleted file mode 100644 index d58619a78f67..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeListResponseTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeListResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"hkaetcktvfc\",\"\":{\"jf\":\"datasnkymuctq\",\"fuwutttxf\":\"dataebrjcxe\",\"hfnljkyq\":\"datajrbirphxepcyv\"}},\"name\":\"vuujq\",\"type\":\"dokgjl\",\"etag\":\"oxgvclt\",\"id\":\"sncghkjeszz\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"ijhtxf\",\"\":{\"xnehmpvec\":\"databfs\"}},\"name\":\"odebfqkkrbmpu\",\"type\":\"riwflzlfb\",\"etag\":\"puz\",\"id\":\"ispnqzahmgkbrp\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"dhibnuq\",\"\":{\"drgvtqagn\":\"dataik\",\"mebf\":\"datauynhijg\"}},\"name\":\"arbu\",\"type\":\"cvpnazzmhjrunmpx\",\"etag\":\"dbhrbnlankxm\",\"id\":\"k\"},{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"henbtkcxywnytn\",\"\":{\"lhaaxdbabp\":\"datanlqidybyxczf\"}},\"name\":\"wrqlfktsthsuco\",\"type\":\"nyyazttbtwwrqpue\",\"etag\":\"kzywbiex\",\"id\":\"eyueaxibxujwb\"}],\"nextLink\":\"walm\"}") - .toObject(IntegrationRuntimeListResponse.class); - Assertions.assertEquals("sncghkjeszz", model.value().get(0).id()); - Assertions.assertEquals("hkaetcktvfc", model.value().get(0).properties().description()); - Assertions.assertEquals("walm", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeListResponse model - = new IntegrationRuntimeListResponse() - .withValue( - Arrays - .asList( - new IntegrationRuntimeResourceInner().withId("sncghkjeszz") - .withProperties(new IntegrationRuntime().withDescription("hkaetcktvfc") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("ispnqzahmgkbrp") - .withProperties(new IntegrationRuntime().withDescription("ijhtxf") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("k") - .withProperties(new IntegrationRuntime().withDescription("dhibnuq") - .withAdditionalProperties(mapOf("type", 
"IntegrationRuntime"))), - new IntegrationRuntimeResourceInner().withId("eyueaxibxujwb") - .withProperties(new IntegrationRuntime().withDescription("henbtkcxywnytn") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))))) - .withNextLink("walm"); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeListResponse.class); - Assertions.assertEquals("sncghkjeszz", model.value().get(0).id()); - Assertions.assertEquals("hkaetcktvfc", model.value().get(0).properties().description()); - Assertions.assertEquals("walm", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java deleted file mode 100644 index ecbb1ef86837..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeMonitoringDataInnerTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeMonitoringDataInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeMonitoringData; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeMonitoringDataInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeMonitoringDataInner model = BinaryData.fromString( - "{\"name\":\"k\",\"nodes\":[{\"nodeName\":\"io\",\"availableMemoryInMB\":952194839,\"cpuUtilization\":1578257059,\"concurrentJobsLimit\":391809232,\"concurrentJobsRunning\":1039189909,\"maxConcurrentJobs\":1375924345,\"sentBytes\":68.58864,\"receivedBytes\":66.46081,\"\":{\"jooxdjebw\":\"datasowzxcugi\"}}]}") - .toObject(IntegrationRuntimeMonitoringDataInner.class); - Assertions.assertEquals("k", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeMonitoringDataInner model = new IntegrationRuntimeMonitoringDataInner().withName("k") - .withNodes(Arrays.asList(new IntegrationRuntimeNodeMonitoringData() - .withAdditionalProperties(mapOf("nodeName", "io", "cpuUtilization", 1578257059, "receivedBytes", - 66.46081f, "concurrentJobsLimit", 391809232, "concurrentJobsRunning", 1039189909, - "maxConcurrentJobs", 1375924345, "availableMemoryInMB", 952194839, "sentBytes", 68.58864f)))); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeMonitoringDataInner.class); - Assertions.assertEquals("k", model.name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java deleted file mode 100644 index 76c3160ed336..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeIpAddressInnerTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeNodeIpAddressInner; - -public final class IntegrationRuntimeNodeIpAddressInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeNodeIpAddressInner model - = BinaryData.fromString("{\"ipAddress\":\"nr\"}").toObject(IntegrationRuntimeNodeIpAddressInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeNodeIpAddressInner model = new IntegrationRuntimeNodeIpAddressInner(); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeNodeIpAddressInner.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java deleted file mode 100644 index 13152d8b690b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodeMonitoringDataTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeMonitoringData; -import java.util.HashMap; -import java.util.Map; - -public final class IntegrationRuntimeNodeMonitoringDataTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeNodeMonitoringData model = BinaryData.fromString( - "{\"nodeName\":\"cwwfvovbvme\",\"availableMemoryInMB\":321338352,\"cpuUtilization\":1542315989,\"concurrentJobsLimit\":1560245881,\"concurrentJobsRunning\":343837467,\"maxConcurrentJobs\":1240635196,\"sentBytes\":81.17821,\"receivedBytes\":67.35784,\"\":{\"wit\":\"datajueiotwmcdytd\"}}") - .toObject(IntegrationRuntimeNodeMonitoringData.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeNodeMonitoringData model = new IntegrationRuntimeNodeMonitoringData() - .withAdditionalProperties(mapOf("nodeName", "cwwfvovbvme", "cpuUtilization", 1542315989, "receivedBytes", - 67.35784f, "concurrentJobsLimit", 1560245881, "concurrentJobsRunning", 343837467, "maxConcurrentJobs", - 1240635196, "availableMemoryInMB", 321338352, "sentBytes", 81.17821f)); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeNodeMonitoringData.class); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java deleted file mode 100644 index fe49b1b7b3cb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeNodesDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimeNodes() - .deleteWithResponse("iguusbwmsyoybjt", "dg", "twkq", "ilf", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java deleted file mode 100644 index 53e05aa24a62..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetIpAddressWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeNodeIpAddress; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeNodesGetIpAddressWithResponseMockTests { - @Test - public void testGetIpAddressWithResponse() throws Exception { - String responseStr = "{\"ipAddress\":\"zrjsbw\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeNodeIpAddress response = manager.integrationRuntimeNodes() - .getIpAddressWithResponse("nglfcrtkpfsjwtq", "o", "eofjoqjmlzlki", "jssfwojfng", - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java deleted file mode 100644 index caeeec9884c0..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesGetWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNode; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeNodesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"nodeName\":\"clpvwtwboxgrv\",\"machineName\":\"voq\",\"hostServiceUri\":\"quwkuszllognl\",\"status\":\"Offline\",\"capabilities\":{\"opulwd\":\"lenygimnfvqaqt\",\"flbch\":\"jbv\",\"eiakwdtuwbrw\":\"obo\"},\"versionStatus\":\"qtyuywzcc\",\"version\":\"kl\",\"registerTime\":\"2021-01-12T20:56:12Z\",\"lastConnectTime\":\"2021-11-02T23:11:41Z\",\"expiryTime\":\"2021-09-29T00:46:12Z\",\"lastStartTime\":\"2021-12-09T20:21:55Z\",\"lastStopTime\":\"2021-05-15T17:00:04Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-09-28T17:21:23Z\",\"lastEndUpdateTime\":\"2021-10-28T21:18:37Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":2020009199,\"maxConcurrentJobs\":1316905686,\"\":{\"mln\":\"dataxpayjselrfqstbfu\",\"ykenmjznj\":\"datafvbeyugggfshn\",\"suappdmu\":\"datarxyaaevrkxy\"}}"; - - HttpClient httpClient - = response -> Mono.just(new 
MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SelfHostedIntegrationRuntimeNode response = manager.integrationRuntimeNodes() - .getWithResponse("yzer", "ezgi", "fisfmcxarh", "cuejtxxlkok", com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java deleted file mode 100644 index b88d72dbc004..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeNodesUpdateWithResponseMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeNode; -import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeNodesUpdateWithResponseMockTests { - @Test - public void testUpdateWithResponse() throws Exception { - String responseStr - = "{\"nodeName\":\"dibdbkgx\",\"machineName\":\"bwepduyqx\",\"hostServiceUri\":\"xarddbnqyayl\",\"status\":\"NeedRegistration\",\"capabilities\":{\"pxbjihz\":\"dprtpxwgt\",\"dnljpouz\":\"xndnbzhs\",\"ugqllq\":\"stytexu\"},\"versionStatus\":\"eyfuf\",\"version\":\"ioyw\",\"registerTime\":\"2021-04-12T09:41:48Z\",\"lastConnectTime\":\"2021-07-14T20:04:05Z\",\"expiryTime\":\"2021-03-23T09:37:38Z\",\"lastStartTime\":\"2021-12-05T19:22:06Z\",\"lastStopTime\":\"2021-09-27T23:49Z\",\"lastUpdateResult\":\"Succeed\",\"lastStartUpdateTime\":\"2021-08-11T04:14:48Z\",\"lastEndUpdateTime\":\"2021-06-16T16:09:13Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1334488394,\"maxConcurrentJobs\":1334330909,\"\":{\"pwom\":\"datatyzjqteqajip\",\"segyt\":\"datal\",\"nw\":\"datauktcqggxdnp\",\"lwoozlfliiru\":\"dataqag\"}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", 
OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SelfHostedIntegrationRuntimeNode response = manager.integrationRuntimeNodes() - .updateWithResponse("vmc", "chbofqd", "qvjfszvecedoptez", "krerurcjgkau", - new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(943572631), - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java deleted file mode 100644 index cfb575ade454..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasGetWithResponseMockTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest; -import com.azure.resourcemanager.datafactory.models.SsisObjectMetadataListResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeObjectMetadatasGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"value\":[{\"type\":\"SsisObjectMetadata\",\"id\":5030800142299365599,\"name\":\"j\",\"description\":\"cbbabi\"},{\"type\":\"SsisObjectMetadata\",\"id\":4061395326445301764,\"name\":\"rxzatlzwrpj\",\"description\":\"cy\"},{\"type\":\"SsisObjectMetadata\",\"id\":3439179967454029059,\"name\":\"hsgreacp\",\"description\":\"dirxprxlgzpnrm\"},{\"type\":\"SsisObjectMetadata\",\"id\":7223056382740170989,\"name\":\"xtjuwobws\",\"description\":\"jlteiulvrpvhiv\"}],\"nextLink\":\"mzcvpoyhvfcwe\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SsisObjectMetadataListResponse response = manager.integrationRuntimeObjectMetadatas() - .getWithResponse("nxgqovfrtm", "yezrexmcawp", "ifzwojioc", - new 
GetSsisObjectMetadataRequest().withMetadataPath("myinpl"), com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals(5030800142299365599L, response.value().get(0).id()); - Assertions.assertEquals("j", response.value().get(0).name()); - Assertions.assertEquals("cbbabi", response.value().get(0).description()); - Assertions.assertEquals("mzcvpoyhvfcwe", response.nextLink()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java deleted file mode 100644 index ecb71943112f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeObjectMetadatasRefreshMockTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.SsisObjectMetadataStatusResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimeObjectMetadatasRefreshMockTests { - @Test - public void testRefresh() throws Exception { - String responseStr - = "{\"status\":\"gtbslagtmkiilcg\",\"name\":\"mjpvgvbzlmz\",\"properties\":\"zgrfaq\",\"error\":\"qmcszdptoyt\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SsisObjectMetadataStatusResponse response = manager.integrationRuntimeObjectMetadatas() - .refresh("xcahfoemcajj", "zoykw", "gnjhxydxicou", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("gtbslagtmkiilcg", response.status()); - Assertions.assertEquals("mjpvgvbzlmz", response.name()); - Assertions.assertEquals("zgrfaq", response.properties()); - Assertions.assertEquals("qmcszdptoyt", response.error()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java deleted file mode 100644 index f6690962838c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpointTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint model = BinaryData.fromString( - "{\"category\":\"bmehh\",\"endpoints\":[{\"domainName\":\"jusrtslhspk\",\"endpointDetails\":[{\"port\":812184412},{\"port\":441408788},{\"port\":1351571833}]},{\"domainName\":\"gkvtmelmqkrhah\",\"endpointDetails\":[{\"port\":1468596781},{\"port\":1935710102},{\"port\":1520620796},{\"port\":27642330}]},{\"domainName\":\"hmdua\",\"endpointDetails\":[{\"port\":552039222}]}]}") - .toObject(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.class); - Assertions.assertEquals("bmehh", model.category()); - Assertions.assertEquals("jusrtslhspk", model.endpoints().get(0).domainName()); - 
Assertions.assertEquals(812184412, model.endpoints().get(0).endpointDetails().get(0).port()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint model - = new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("bmehh") - .withEndpoints( - Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint() - .withDomainName("jusrtslhspk") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(812184412), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(441408788), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails() - .withPort(1351571833))), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("gkvtmelmqkrhah") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1468596781), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1935710102), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1520620796), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(27642330))), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("hmdua") - .withEndpointDetails( - Arrays.asList(new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails() - .withPort(552039222))))); - model = BinaryData.fromObject(model) - .toObject(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint.class); - Assertions.assertEquals("bmehh", model.category()); - Assertions.assertEquals("jusrtslhspk", model.endpoints().get(0).domainName()); - Assertions.assertEquals(812184412, model.endpoints().get(0).endpointDetails().get(0).port()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java deleted file mode 100644 index 0577cb26b44a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetailsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails model - = BinaryData.fromString("{\"port\":434822175}") - .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.class); - Assertions.assertEquals(434822175, model.port()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails model - = new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(434822175); - model - = BinaryData.fromObject(model).toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails.class); - Assertions.assertEquals(434822175, model.port()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java deleted file mode 100644 index 0ec269d6d779..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpoint model - = BinaryData.fromString("{\"domainName\":\"vfadmws\",\"endpointDetails\":[{\"port\":1913869945}]}") - .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpoint.class); - Assertions.assertEquals("vfadmws", model.domainName()); - Assertions.assertEquals(1913869945, model.endpointDetails().get(0).port()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpoint model - = new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("vfadmws") - .withEndpointDetails(Arrays - .asList(new 
IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails().withPort(1913869945))); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpoint.class); - Assertions.assertEquals("vfadmws", model.domainName()); - Assertions.assertEquals(1913869945, model.endpointDetails().get(0).port()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java deleted file mode 100644 index 6ed83e2b9030..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner model = BinaryData.fromString( - "{\"value\":[{\"category\":\"frlh\",\"endpoints\":[{\"domainName\":\"kyv\",\"endpointDetails\":[{}]},{\"domainName\":\"n\",\"endpointDetails\":[{},{},{}]},{\"domainName\":\"zka\",\"endpointDetails\":[{}]},{\"domainName\":\"b\",\"endpointDetails\":[{},{}]}]}]}") - .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.class); - Assertions.assertEquals("frlh", model.value().get(0).category()); - Assertions.assertEquals("kyv", model.value().get(0).endpoints().get(0).domainName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner model - = new IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner() - .withValue( - Arrays - .asList( - new IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint().withCategory("frlh") - .withEndpoints(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("kyv") - .withEndpointDetails(Arrays.asList( - new 
IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("n") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("zka") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())), - new IntegrationRuntimeOutboundNetworkDependenciesEndpoint().withDomainName("b") - .withEndpointDetails(Arrays.asList( - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(), - new IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails())))))); - model = BinaryData.fromObject(model) - .toObject(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponseInner.class); - Assertions.assertEquals("frlh", model.value().get(0).category()); - Assertions.assertEquals("kyv", model.value().get(0).endpoints().get(0).domainName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java deleted file mode 100644 index b1d4d8fc8ca0..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeReferenceTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeReference model = BinaryData.fromString( - "{\"referenceName\":\"dggkzzlvmbmpa\",\"parameters\":{\"yw\":\"datadfvue\",\"yhrfouyftaakcpw\":\"databpfvm\",\"nubexk\":\"datayzvqt\"}}") - .toObject(IntegrationRuntimeReference.class); - Assertions.assertEquals("dggkzzlvmbmpa", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeReference model = new IntegrationRuntimeReference().withReferenceName("dggkzzlvmbmpa") - .withParameters(mapOf("yw", "datadfvue", "yhrfouyftaakcpw", "databpfvm", "nubexk", "datayzvqt")); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeReference.class); - Assertions.assertEquals("dggkzzlvmbmpa", model.referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java deleted file mode 100644 index f64d5a7367bd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeResourceInnerTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"yoxa\",\"\":{\"bniwdj\":\"datakzjancuxrhdwbav\",\"s\":\"datawz\",\"xytxhpzxbz\":\"databpg\"}},\"name\":\"zabglcuhxwt\",\"type\":\"yqiklbbovplwzb\",\"etag\":\"gy\",\"id\":\"uosvmkfssxqukk\"}") - .toObject(IntegrationRuntimeResourceInner.class); - Assertions.assertEquals("uosvmkfssxqukk", model.id()); - Assertions.assertEquals("yoxa", model.properties().description()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - IntegrationRuntimeResourceInner model = new IntegrationRuntimeResourceInner().withId("uosvmkfssxqukk") - .withProperties(new IntegrationRuntime().withDescription("yoxa") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeResourceInner.class); - Assertions.assertEquals("uosvmkfssxqukk", model.id()); - Assertions.assertEquals("yoxa", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java deleted file mode 100644 index f1c2692bcb8f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusResponseInnerTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.IntegrationRuntimeStatusResponseInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus; -import java.util.HashMap; -import java.util.Map; - -public final class IntegrationRuntimeStatusResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeStatusResponseInner model = BinaryData.fromString( - "{\"name\":\"ogtwrupqsxvnmi\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"kvceoveilovnotyf\",\"state\":\"Limited\",\"\":{\"x\":\"databkc\",\"nv\":\"datahbttkphyw\",\"qnermclfplphoxu\":\"datat\",\"ye\":\"datacrpab\"}}}") - .toObject(IntegrationRuntimeStatusResponseInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeStatusResponseInner model = new IntegrationRuntimeStatusResponseInner() - .withProperties(new IntegrationRuntimeStatus().withAdditionalProperties( - mapOf("dataFactoryName", "kvceoveilovnotyf", "state", "Limited", "type", "IntegrationRuntimeStatus"))); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeStatusResponseInner.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java deleted file mode 100644 index 9cf69d515041..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeStatusTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatus; -import java.util.HashMap; -import java.util.Map; - -public final class IntegrationRuntimeStatusTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeStatus model = BinaryData.fromString( - "{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"bjtazqugxywpmu\",\"state\":\"Started\",\"\":{\"dsuyonobgla\":\"datawfqkquj\",\"tcc\":\"datacq\",\"udxytlmoyrx\":\"datag\",\"qj\":\"datawfudwpzntxhdzhl\"}}") - .toObject(IntegrationRuntimeStatus.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeStatus model = new IntegrationRuntimeStatus().withAdditionalProperties( - mapOf("dataFactoryName", "bjtazqugxywpmu", "state", "Started", "type", "IntegrationRuntimeStatus")); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeStatus.class); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java deleted file mode 100644 index 2eba42937a82..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntime model = BinaryData.fromString( - "{\"type\":\"IntegrationRuntime\",\"description\":\"l\",\"\":{\"wiyighxpkdw\":\"datasxnkjzkdeslpvlo\",\"upedeojnabckhs\":\"databaiuebbaumny\",\"ie\":\"datatxp\",\"jdhtldwkyzxu\":\"datatfhvpesapskrdqmh\"}}") - .toObject(IntegrationRuntime.class); - Assertions.assertEquals("l", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntime model = new IntegrationRuntime().withDescription("l") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime")); - model = 
BinaryData.fromObject(model).toObject(IntegrationRuntime.class); - Assertions.assertEquals("l", model.description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java deleted file mode 100644 index 4f8b41245ec2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimeVNetPropertiesTests.java +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeVNetProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class IntegrationRuntimeVNetPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - IntegrationRuntimeVNetProperties model = BinaryData.fromString( - "{\"vNetId\":\"ax\",\"subnet\":\"jojxolknsh\",\"publicIPs\":[\"kptbhmbgl\",\"nlbnatlnchzzcd\",\"xortd\"],\"subnetId\":\"vhbujkaho\",\"\":{\"wsr\":\"datawojdzccq\",\"exr\":\"datafbsdiicdzfb\"}}") - .toObject(IntegrationRuntimeVNetProperties.class); - Assertions.assertEquals("ax", model.vNetId()); - Assertions.assertEquals("jojxolknsh", model.subnet()); - Assertions.assertEquals("kptbhmbgl", model.publicIPs().get(0)); - Assertions.assertEquals("vhbujkaho", model.subnetId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - IntegrationRuntimeVNetProperties model = new IntegrationRuntimeVNetProperties().withVNetId("ax") - .withSubnet("jojxolknsh") - .withPublicIPs(Arrays.asList("kptbhmbgl", "nlbnatlnchzzcd", "xortd")) - .withSubnetId("vhbujkaho") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(IntegrationRuntimeVNetProperties.class); - Assertions.assertEquals("ax", model.vNetId()); - Assertions.assertEquals("jojxolknsh", model.subnet()); - Assertions.assertEquals("kptbhmbgl", model.publicIPs().get(0)); - Assertions.assertEquals("vhbujkaho", model.subnetId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java deleted file mode 100644 index 0a14b466ba01..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeWithResponseMockTests { - @Test - public void testCreateLinkedIntegrationRuntimeWithResponse() throws Exception { - String responseStr - = "{\"name\":\"iaognmanrzjprlq\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"wpejtszjbvjcvw\",\"state\":\"Started\",\"\":{\"hxwwhusrodr\":\"dataowzclijmd\",\"wkwmq\":\"datamozafwqmo\"}}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() - .createLinkedIntegrationRuntimeWithResponse("taeallsxfzantssb", "moq", "j", - new CreateLinkedIntegrationRuntimeRequest().withName("hmxkgxrfrm") - .withSubscriptionId("wpzuxoynxlkloqp") - .withDataFactoryName("aqcrefk") - .withDataFactoryLocation("dnzowpvrwecr"), - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index f443a976ca95..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"zfypdsrfpihvijsj\",\"\":{\"duyotqbfqt\":\"dataocqboyjjfxzn\"}},\"name\":\"tuxmegr\",\"type\":\"o\",\"etag\":\"zjlqrpsqpj\",\"id\":\"coibiodfybafenwv\"}"; - - HttpClient httpClient - = response -> Mono.just(new 
MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeResource response = manager.integrationRuntimes() - .define("gkzxznc") - .withExistingFactory("oqldacxo", "aqassukv") - .withProperties(new IntegrationRuntime().withDescription("ocznsz") - .withAdditionalProperties(mapOf("type", "IntegrationRuntime"))) - .withIfMatch("aazvmnv") - .create(); - - Assertions.assertEquals("coibiodfybafenwv", response.id()); - Assertions.assertEquals("zfypdsrfpihvijsj", response.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java deleted file mode 100644 index a5181385b3a6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes() - .deleteWithResponse("j", "gsfvyvnpu", "lqtdcasjnzeckp", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java deleted file mode 100644 index 28d82c7dea3a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetMonitoringDataWithResponseMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeMonitoringData; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesGetMonitoringDataWithResponseMockTests { - @Test - public void testGetMonitoringDataWithResponse() throws Exception { - String responseStr - = "{\"name\":\"nisinpkcww\",\"nodes\":[{\"nodeName\":\"b\",\"availableMemoryInMB\":26795964,\"cpuUtilization\":2078169809,\"concurrentJobsLimit\":1586982490,\"concurrentJobsRunning\":597607505,\"maxConcurrentJobs\":1140443642,\"sentBytes\":0.19900799,\"receivedBytes\":92.48324,\"\":{\"zxotwralnl\":\"datauptnhuybtmtokohy\",\"sarfmjschcxud\":\"datazlnrellwfgyabg\",\"qlltoiud\":\"dataomhhsumzfvrak\",\"bsbhaqsu\":\"dataeoibehrholjjxi\"}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeMonitoringData response = manager.integrationRuntimes() - .getMonitoringDataWithResponse("yuukhssretugorc", "kcsevq", "dwktogmcblwh", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("nisinpkcww", response.name()); - } -} 
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java deleted file mode 100644 index a27e6a46b183..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetStatusWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesGetStatusWithResponseMockTests { - @Test - public void testGetStatusWithResponse() throws Exception { - String responseStr - = "{\"name\":\"nlq\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"eevzelmmwmdhm\",\"state\":\"Limited\",\"\":{\"jfmztpwujmu\":\"databbvmckpnmn\",\"zrfonqjnpkofj\":\"datatvyeyeb\",\"eyuirrrxrftfamo\":\"dataus\"}}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - 
.withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() - .getStatusWithResponse("pjkczkc", "lzicltwan", "bzycxvifkzspwvl", com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java deleted file mode 100644 index 641d22aa56ef..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesGetWithResponseMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"j\",\"\":{\"zg\":\"datapruommtuc\",\"mnjastkbzzyqbwim\":\"datalvkdaphz\",\"r\":\"datajhmgocal\",\"nreukcrcsdaip\":\"datakmwyoukfim\"}},\"name\":\"povr\",\"type\":\"otbybcx\",\"etag\":\"rlzdnccx\",\"id\":\"qpfgjnynuqikoiuj\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeResource response = manager.integrationRuntimes() - .getWithResponse("afa", "oornsktdgbombncj", "xkcpqw", "trqvlcunnb", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("qpfgjnynuqikoiuj", response.id()); - Assertions.assertEquals("j", response.properties().description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java deleted file mode 100644 index 5cd1df1505fa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListByFactoryMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"IntegrationRuntime\",\"description\":\"eqzlvjlsyzzk\",\"\":{\"ntgi\":\"dataeydjagyks\",\"b\":\"dataaazfjbxhnah\",\"a\":\"datao\",\"gbyxpma\":\"datawidumilxi\"}},\"name\":\"rn\",\"type\":\"b\",\"etag\":\"iq\",\"id\":\"cti\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new 
AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.integrationRuntimes().listByFactory("n", "rlygyjrlugigzw", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("cti", response.iterator().next().id()); - Assertions.assertEquals("eqzlvjlsyzzk", response.iterator().next().properties().description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java deleted file mode 100644 index 60baaf2fcf9e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsWithResponseMockTests { - @Test - public void testListOutboundNetworkDependenciesEndpointsWithResponse() throws Exception { - String responseStr - = "{\"value\":[{\"category\":\"ixzwiehqvvbg\",\"endpoints\":[{\"domainName\":\"wticu\",\"endpointDetails\":[{}]},{\"domainName\":\"ubdmcd\",\"endpointDetails\":[{}]},{\"domainName\":\"lzhujcx\",\"endpointDetails\":[{},{},{}]}]}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse response = manager.integrationRuntimes() - .listOutboundNetworkDependenciesEndpointsWithResponse("yvjmwcflphqtqpc", "wmukzcrpd", "gzctfnlaklszbeu", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("ixzwiehqvvbg", response.value().get(0).category()); - Assertions.assertEquals("wticu", response.value().get(0).endpoints().get(0).domainName()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java deleted file mode 100644 index 604a998c445f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesRemoveLinksWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesRemoveLinksWithResponseMockTests { - @Test - public void testRemoveLinksWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes() - 
.removeLinksWithResponse("nhmnswlf", "ukildlaytviwv", "jwtzki", - new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("dpssklm"), - com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java deleted file mode 100644 index f2312aa86278..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStartMockTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeStatusResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesStartMockTests { - @Test - public void testStart() throws Exception { - String responseStr - = "{\"name\":\"ua\",\"properties\":{\"type\":\"IntegrationRuntimeStatus\",\"dataFactoryName\":\"dj\",\"state\":\"NeedRegistration\",\"\":{\"w\":\"dataeij\",\"kfslm\":\"datajvuwaqiomdlp\",\"uwgrtvyw\":\"datawowmwrn\"}}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, 
responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - IntegrationRuntimeStatusResponse response = manager.integrationRuntimes() - .start("hdzqvwlixhqotqs", "rnhlsfhfjwajsb", "ytfvjvmjh", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java deleted file mode 100644 index 8b83e377a464..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesStopMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesStopMockTests { - @Test - public void testStop() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes().stop("obg", "vhdb", "evyyppaycasch", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java deleted file mode 100644 index 6b74c1c63b25..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesSyncCredentialsWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesSyncCredentialsWithResponseMockTests { - @Test - public void testSyncCredentialsWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes() - .syncCredentialsWithResponse("fmidkdywppt", "ssvmdoxxcvug", "ryk", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java deleted file mode 100644 index 3fcf740d775e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/IntegrationRuntimesUpgradeWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class IntegrationRuntimesUpgradeWithResponseMockTests { - @Test - public void testUpgradeWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.integrationRuntimes() - .upgradeWithResponse("wdcgdkwwulj", "euj", "sxrsxbofmvau", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java deleted file mode 100644 index e9b50e327277..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.JiraObjectDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class JiraObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JiraObjectDataset model = BinaryData.fromString( - "{\"type\":\"JiraObject\",\"typeProperties\":{\"tableName\":\"datavlozukgsnu\"},\"description\":\"yx\",\"structure\":\"dataf\",\"schema\":\"dataxcebnbeo\",\"linkedServiceName\":{\"referenceName\":\"kemqqerwqx\",\"parameters\":{\"mdfkhttuobr\":\"datav\",\"wtfma\":\"datazmhytebjkjgee\",\"mhlvyqn\":\"datavbmnhtwofx\"}},\"parameters\":{\"qqrugwespscvs\":{\"type\":\"String\",\"defaultValue\":\"datamlqkiekhj\"}},\"annotations\":[\"datatluwozfvzasupc\",\"dataqgxcvwio\",\"datahcmcgm\",\"datam\"],\"folder\":{\"name\":\"qxuyi\"},\"\":{\"dqseypdlmajpuy\":\"datan\"}}") - .toObject(JiraObjectDataset.class); - Assertions.assertEquals("yx", model.description()); - Assertions.assertEquals("kemqqerwqx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("qqrugwespscvs").type()); - Assertions.assertEquals("qxuyi", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JiraObjectDataset model = new JiraObjectDataset().withDescription("yx") - .withStructure("dataf") - .withSchema("dataxcebnbeo") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kemqqerwqx") - .withParameters( - 
mapOf("mdfkhttuobr", "datav", "wtfma", "datazmhytebjkjgee", "mhlvyqn", "datavbmnhtwofx"))) - .withParameters(mapOf("qqrugwespscvs", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datamlqkiekhj"))) - .withAnnotations(Arrays.asList("datatluwozfvzasupc", "dataqgxcvwio", "datahcmcgm", "datam")) - .withFolder(new DatasetFolder().withName("qxuyi")) - .withTableName("datavlozukgsnu"); - model = BinaryData.fromObject(model).toObject(JiraObjectDataset.class); - Assertions.assertEquals("yx", model.description()); - Assertions.assertEquals("kemqqerwqx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("qqrugwespscvs").type()); - Assertions.assertEquals("qxuyi", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java deleted file mode 100644 index d794baf16840..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JiraSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.JiraSource; - -public final class JiraSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JiraSource model = BinaryData.fromString( - "{\"type\":\"JiraSource\",\"query\":\"datadgmqscijlfulxg\",\"queryTimeout\":\"dataas\",\"additionalColumns\":\"datamwsooq\",\"sourceRetryCount\":\"datavplmyzebvgh\",\"sourceRetryWait\":\"dataydehbvbexrbynnl\",\"maxConcurrentConnections\":\"datad\",\"disableMetricsCollection\":\"datak\",\"\":{\"npscfkef\":\"datazsicbbosac\"}}") - .toObject(JiraSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JiraSource model = new JiraSource().withSourceRetryCount("datavplmyzebvgh") - .withSourceRetryWait("dataydehbvbexrbynnl") - .withMaxConcurrentConnections("datad") - .withDisableMetricsCollection("datak") - .withQueryTimeout("dataas") - .withAdditionalColumns("datamwsooq") - .withQuery("datadgmqscijlfulxg"); - model = BinaryData.fromObject(model).toObject(JiraSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java deleted file mode 100644 index 2abd031d16a7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTests.java +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.azure.resourcemanager.datafactory.models.JsonDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class JsonDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonDataset model = BinaryData.fromString( - "{\"type\":\"Json\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"dataf\",\"fileName\":\"datavb\",\"\":{\"oorssatfy\":\"datawhgkgsoa\",\"as\":\"dataipufdmxuqbdq\",\"ixhg\":\"datatuxvzfqayopbt\"}},\"encodingName\":\"datahx\",\"compression\":{\"type\":\"datadztgsqja\",\"level\":\"datamar\",\"\":{\"motpuwnnoh\":\"dataibpgbrhbjdqknhq\",\"wyiulaynosu\":\"datamzngocfrjuy\",\"ucumlddauqml\":\"datakfhaxttpfsmwgsgh\",\"rigrjdljlkq\":\"datafeothxu\"}}},\"description\":\"krbzkuas\",\"structure\":\"dataxk\",\"schema\":\"datar\",\"linkedServiceName\":{\"referenceName\":\"ulhgltoiz\",\"parameters\":{\"gafxczvf\":\"datascksgfyyskye\",\"ety\":\"datackwrtw\",\"lhpdhwynctaczcnj\":\"dataeszr\"}},\"parameters\":{\"tiklsm\":{\"type\":\"SecureString\",\"defaultValue\":\"datanvjxitzovnk\"},\"bgbudav\":{\"type\":\"Object\",\"defaultValue\":\"dataqlcoqksyiibhyx\"},\"ojvlirknucosaw\":{\"type\":\"SecureString\",\"defaultValue\":\"datarbccqcdht\"},\"yaixihzqj\":{\"type\":\"Float\",\"defaultValue\":\"datanagzlg\"}},\"annotations\":[\"datamuydqfttk\",\"datacybdueurgm\",\"datadcpks\"],\"folde
r\":{\"name\":\"jermhzic\"},\"\":{\"hyaaknyukibxiglh\":\"datad\",\"ejp\":\"datahzwxq\",\"xkbylhyyxgffklv\":\"datailhvtozyagjj\",\"wtrdgs\":\"dataz\"}}") - .toObject(JsonDataset.class); - Assertions.assertEquals("krbzkuas", model.description()); - Assertions.assertEquals("ulhgltoiz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("tiklsm").type()); - Assertions.assertEquals("jermhzic", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonDataset model - = new JsonDataset().withDescription("krbzkuas") - .withStructure("dataxk") - .withSchema("datar") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ulhgltoiz") - .withParameters( - mapOf("gafxczvf", "datascksgfyyskye", "ety", "datackwrtw", "lhpdhwynctaczcnj", "dataeszr"))) - .withParameters(mapOf("tiklsm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datanvjxitzovnk"), - "bgbudav", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataqlcoqksyiibhyx"), - "ojvlirknucosaw", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datarbccqcdht"), - "yaixihzqj", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datanagzlg"))) - .withAnnotations(Arrays.asList("datamuydqfttk", "datacybdueurgm", "datadcpks")) - .withFolder(new DatasetFolder().withName("jermhzic")) - .withLocation(new DatasetLocation().withFolderPath("dataf") - .withFileName("datavb") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("datahx") - .withCompression(new DatasetCompression().withType("datadztgsqja") - .withLevel("datamar") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(JsonDataset.class); - Assertions.assertEquals("krbzkuas", model.description()); - 
Assertions.assertEquals("ulhgltoiz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("tiklsm").type()); - Assertions.assertEquals("jermhzic", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java deleted file mode 100644 index 97036e90fd5f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonDatasetTypePropertiesTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.JsonDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import java.util.HashMap; -import java.util.Map; - -public final class JsonDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datavq\",\"fileName\":\"datansgnwxlwmez\",\"\":{\"fve\":\"dataxpthceopvkvt\",\"cdyarnggcjfwblqh\":\"dataobpbokhmm\",\"s\":\"datakasmcolmugpyva\"}},\"encodingName\":\"datal\",\"compression\":{\"type\":\"datazxeygzvtye\",\"level\":\"datahubnobgu\",\"\":{\"pdmioyj\":\"dataqsq\",\"cpszpmcvqdvrdmv\":\"datanmlvi\",\"hgvqojbxaotcgbz\":\"datayrx\",\"oioyidoxznvgvd\":\"datambtple\"}}}") - .toObject(JsonDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonDatasetTypeProperties model = new JsonDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datavq") - .withFileName("datansgnwxlwmez") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("datal") - .withCompression(new DatasetCompression().withType("datazxeygzvtye") - .withLevel("datahubnobgu") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(JsonDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java deleted file mode 100644 index ea4da1be467d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonFormatTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.JsonFormat; - -public final class JsonFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonFormat model = BinaryData.fromString( - "{\"type\":\"JsonFormat\",\"filePattern\":\"datazdqwuzvcm\",\"nestingSeparator\":\"datakxizek\",\"encodingName\":\"datafrjwucaon\",\"jsonNodeReference\":\"dataajbvbn\",\"jsonPathDefinition\":\"datademdidackzi\",\"serializer\":\"datazwdydamisvpztdi\",\"deserializer\":\"datakpxkqejtpjfoj\",\"\":{\"oiboan\":\"datarlshxuknsykd\",\"uld\":\"datadrcoanv\"}}") - .toObject(JsonFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonFormat model = new JsonFormat().withSerializer("datazwdydamisvpztdi") - .withDeserializer("datakpxkqejtpjfoj") - .withFilePattern("datazdqwuzvcm") - .withNestingSeparator("datakxizek") - .withEncodingName("datafrjwucaon") - .withJsonNodeReference("dataajbvbn") - .withJsonPathDefinition("datademdidackzi"); - model = 
BinaryData.fromObject(model).toObject(JsonFormat.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java deleted file mode 100644 index 94cf4401d1a4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonReadSettingsTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.JsonReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class JsonReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonReadSettings model = BinaryData.fromString( - "{\"type\":\"JsonReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"mllegucemagsyljw\":\"dataddngq\",\"kqvn\":\"datapzqhphlajm\",\"zrgyrldoalldglz\":\"datajkjopbg\"}},\"\":{\"zkxaujj\":\"dataftjrederkvbdvl\",\"fyftgaetcpl\":\"datayztghdwrvffjpwsz\",\"xv\":\"dataidiuxzzhld\",\"vrbb\":\"datarpuwacfqn\"}}") - .toObject(JsonReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonReadSettings model = new JsonReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); - model = BinaryData.fromObject(model).toObject(JsonReadSettings.class); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java deleted file mode 100644 index a3f70f4a2547..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSinkTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.JsonSink; -import com.azure.resourcemanager.datafactory.models.JsonWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class JsonSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonSink model = BinaryData.fromString( - 
"{\"type\":\"JsonSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataocppgsfjnjgmogmc\",\"disableMetricsCollection\":\"dataqzukbwyp\",\"copyBehavior\":\"dataqfzvyoxgeriz\",\"metadata\":[{\"name\":\"datakovopqpfcdp\",\"value\":\"dataqmwpmrlgjjqs\"},{\"name\":\"datap\",\"value\":\"dataamvrejkvcimq\"},{\"name\":\"datad\",\"value\":\"datahhwtgcgefayc\"}],\"\":{\"x\":\"dataotbj\",\"xl\":\"dataziotxnpovf\",\"disupn\":\"datavms\"}},\"formatSettings\":{\"type\":\"JsonWriteSettings\",\"filePattern\":\"datauozdvokxuyhhrd\",\"\":{\"aaznzaxzfhh\":\"databqeahgsibldxyaq\",\"ryalkfdxauih\":\"datagyxkg\",\"q\":\"databrdhkdwye\",\"gppdq\":\"dataxuffgjynminhvdkq\"}},\"writeBatchSize\":\"datasapweaxthuhur\",\"writeBatchTimeout\":\"datau\",\"sinkRetryCount\":\"dataotapstkdbnqjpcu\",\"sinkRetryWait\":\"dataxoymfkumbysgsqz\",\"maxConcurrentConnections\":\"datarvf\",\"disableMetricsCollection\":\"datayph\",\"\":{\"tvikfenmifl\":\"dataxrpah\",\"hxtabli\":\"datayfqllolnxhsupi\"}}") - .toObject(JsonSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonSink model = new JsonSink().withWriteBatchSize("datasapweaxthuhur") - .withWriteBatchTimeout("datau") - .withSinkRetryCount("dataotapstkdbnqjpcu") - .withSinkRetryWait("dataxoymfkumbysgsqz") - .withMaxConcurrentConnections("datarvf") - .withDisableMetricsCollection("datayph") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataocppgsfjnjgmogmc") - .withDisableMetricsCollection("dataqzukbwyp") - .withCopyBehavior("dataqfzvyoxgeriz") - .withMetadata( - Arrays.asList(new MetadataItem().withName("datakovopqpfcdp").withValue("dataqmwpmrlgjjqs"), - new MetadataItem().withName("datap").withValue("dataamvrejkvcimq"), - new MetadataItem().withName("datad").withValue("datahhwtgcgefayc"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings(new 
JsonWriteSettings().withFilePattern("datauozdvokxuyhhrd")); - model = BinaryData.fromObject(model).toObject(JsonSink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java deleted file mode 100644 index d9301a3100aa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonSourceTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.JsonReadSettings; -import com.azure.resourcemanager.datafactory.models.JsonSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class JsonSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonSource model = BinaryData.fromString( - "{\"type\":\"JsonSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataksq\",\"disableMetricsCollection\":\"dataxdleohysdgkbaxy\",\"\":{\"d\":\"datatkrqiyu\",\"otaaqyxkloabco\":\"datajjqztrpjmeip\",\"bvolivianklqclft\":\"dataqaavjkrepqasviy\"}},\"formatSettings\":{\"type\":\"JsonReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"roaqzekggurwx\":\"dataxpdnlbpnbckoh\",\"xg\":\"datas\",\"grzfs\":\"datawfiyuof\"}},\"\":{\"tjbvhui\":\"dataookwnz\",\"vmmmweljca\":\"datalodcqsylkk\"}},\"additionalColumns\":\"datagymcm\",\"sourceRetryCount\":\"datamnjitxughlbi\",\"sourceRetryWait\":\"datao\",\"maxConcurrentConnections\":\"datayip\",\"disableMetricsCollection\":\"datahkioec\",\"\":{\"f\":\"dataoxpvbvfc\",\"d\":\"dataxuqpddebokzdshh\",\"zzkzvfywspaja\":\"dataxnzapzibmst\"}}") - .toObject(JsonSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonSource model = new JsonSource().withSourceRetryCount("datamnjitxughlbi") - .withSourceRetryWait("datao") - .withMaxConcurrentConnections("datayip") - .withDisableMetricsCollection("datahkioec") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("dataksq") - .withDisableMetricsCollection("dataxdleohysdgkbaxy") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - 
.withFormatSettings(new JsonReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datagymcm"); - model = BinaryData.fromObject(model).toObject(JsonSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java deleted file mode 100644 index b97f86c261a5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/JsonWriteSettingsTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.JsonWriteSettings; - -public final class JsonWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - JsonWriteSettings model = BinaryData.fromString( - "{\"type\":\"JsonWriteSettings\",\"filePattern\":\"dataezmznoejqduyq\",\"\":{\"x\":\"datayzajdpbnbp\"}}") - .toObject(JsonWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - JsonWriteSettings model = new JsonWriteSettings().withFilePattern("dataezmznoejqduyq"); - model = BinaryData.fromObject(model).toObject(JsonWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java deleted file mode 100644 index c1782913da51..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseLocationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LakeHouseLocation; - -public final class LakeHouseLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseLocation model = BinaryData.fromString( - "{\"type\":\"LakeHouseLocation\",\"folderPath\":\"databtvkbi\",\"fileName\":\"datanhtfgfi\",\"\":{\"bcakzn\":\"datayhizpaczmu\"}}") - .toObject(LakeHouseLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseLocation model = new LakeHouseLocation().withFolderPath("databtvkbi").withFileName("datanhtfgfi"); - model = BinaryData.fromObject(model).toObject(LakeHouseLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java deleted file mode 100644 index a7bf745453e5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseReadSettingsTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LakeHouseReadSettings; - -public final class LakeHouseReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseReadSettings model = BinaryData.fromString( - "{\"type\":\"LakeHouseReadSettings\",\"recursive\":\"datatj\",\"wildcardFolderPath\":\"dataymdypsxkwwfy\",\"wildcardFileName\":\"datahurklowm\",\"fileListPath\":\"datavzc\",\"enablePartitionDiscovery\":\"datalyoix\",\"partitionRootPath\":\"dataiznqizvsihsmtxj\",\"deleteFilesAfterCompletion\":\"datajhsjuqqtzr\",\"modifiedDatetimeStart\":\"databodgo\",\"modifiedDatetimeEnd\":\"datakia\",\"maxConcurrentConnections\":\"datagugrjxx\",\"disableMetricsCollection\":\"datagorvumwnbzslmff\",\"\":{\"kd\":\"datakprxypxti\",\"omev\":\"dataebafiq\",\"butytoainig\":\"dataetamdvncxt\",\"lack\":\"dataxhzqgbaqvqe\"}}") - .toObject(LakeHouseReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseReadSettings model = new LakeHouseReadSettings().withMaxConcurrentConnections("datagugrjxx") - .withDisableMetricsCollection("datagorvumwnbzslmff") - .withRecursive("datatj") - .withWildcardFolderPath("dataymdypsxkwwfy") - .withWildcardFileName("datahurklowm") - .withFileListPath("datavzc") - .withEnablePartitionDiscovery("datalyoix") - .withPartitionRootPath("dataiznqizvsihsmtxj") - .withDeleteFilesAfterCompletion("datajhsjuqqtzr") - .withModifiedDatetimeStart("databodgo") - .withModifiedDatetimeEnd("datakia"); - model = BinaryData.fromObject(model).toObject(LakeHouseReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java deleted file mode 100644 index 343d76fe913b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LakeHouseTableDataset; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LakeHouseTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseTableDataset model = BinaryData.fromString( - "{\"type\":\"LakeHouseTable\",\"typeProperties\":{\"schema\":\"datagva\",\"table\":\"datar\"},\"description\":\"xlmbrtvtgolmlp\",\"structure\":\"datatlayyxhxj\",\"schema\":\"datays\",\"linkedServiceName\":{\"referenceName\":\"aqqjh\",\"parameters\":{\"njc\":\"datafaob\",\"qwssyd\":\"databozvc\",\"ywo\":\"datawrybi\"}},\"parameters\":{\"pkv\":{\"type\":\"Int\",\"defaultValue\":\"datavtzijrdlxbaeyo\"},\"mnpbdrcibj\":{\"type\":\"Int\",\"defaultValue\":\"datafdz\"}},\"annotations\":[\"datanoztnhvd\",\"datau\",\"dataamqobqehs\",\"datasht\"],\"folder\":{\"name\":\"zfeoctrzjw\"},\"\":{\"mbvwdxgy\":\"datackze\",\"l\":\"datapmxqzl\"}}") - .toObject(LakeHouseTableDataset.class); - 
Assertions.assertEquals("xlmbrtvtgolmlp", model.description()); - Assertions.assertEquals("aqqjh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("pkv").type()); - Assertions.assertEquals("zfeoctrzjw", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseTableDataset model = new LakeHouseTableDataset().withDescription("xlmbrtvtgolmlp") - .withStructure("datatlayyxhxj") - .withSchema("datays") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aqqjh") - .withParameters(mapOf("njc", "datafaob", "qwssyd", "databozvc", "ywo", "datawrybi"))) - .withParameters(mapOf("pkv", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datavtzijrdlxbaeyo"), - "mnpbdrcibj", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datafdz"))) - .withAnnotations(Arrays.asList("datanoztnhvd", "datau", "dataamqobqehs", "datasht")) - .withFolder(new DatasetFolder().withName("zfeoctrzjw")) - .withSchemaTypePropertiesSchema("datagva") - .withTable("datar"); - model = BinaryData.fromObject(model).toObject(LakeHouseTableDataset.class); - Assertions.assertEquals("xlmbrtvtgolmlp", model.description()); - Assertions.assertEquals("aqqjh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("pkv").type()); - Assertions.assertEquals("zfeoctrzjw", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java deleted file mode 100644 index 626e2e0f1e14..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.LakeHouseTableDatasetTypeProperties; - -public final class LakeHouseTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datarcatkuhskegdkvv\",\"table\":\"datal\"}") - .toObject(LakeHouseTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseTableDatasetTypeProperties model - = new LakeHouseTableDatasetTypeProperties().withSchema("datarcatkuhskegdkvv").withTable("datal"); - model = BinaryData.fromObject(model).toObject(LakeHouseTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java deleted file mode 100644 index fa2c49f09274..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSinkTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LakeHouseTableSink; - -public final class LakeHouseTableSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseTableSink model = BinaryData.fromString( - "{\"type\":\"LakeHouseTableSink\",\"tableActionOption\":\"dataffjie\",\"partitionOption\":\"datamhsqyjsw\",\"partitionNameList\":\"datahafcoayuq\",\"writeBatchSize\":\"datatghjmmjmmjnxh\",\"writeBatchTimeout\":\"datajtsemnidbaykvlrs\",\"sinkRetryCount\":\"dataniocyo\",\"sinkRetryWait\":\"dataimbchi\",\"maxConcurrentConnections\":\"datawaffsjqn\",\"disableMetricsCollection\":\"datacybugojzcarg\",\"\":{\"lvaa\":\"dataa\"}}") - .toObject(LakeHouseTableSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseTableSink model = new LakeHouseTableSink().withWriteBatchSize("datatghjmmjmmjnxh") - .withWriteBatchTimeout("datajtsemnidbaykvlrs") - .withSinkRetryCount("dataniocyo") - .withSinkRetryWait("dataimbchi") - .withMaxConcurrentConnections("datawaffsjqn") - .withDisableMetricsCollection("datacybugojzcarg") - .withTableActionOption("dataffjie") - .withPartitionOption("datamhsqyjsw") - .withPartitionNameList("datahafcoayuq"); - model = BinaryData.fromObject(model).toObject(LakeHouseTableSink.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java deleted file mode 100644 index 551700f35034..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseTableSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LakeHouseTableSource; - -public final class LakeHouseTableSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseTableSource model = BinaryData.fromString( - "{\"type\":\"LakeHouseTableSource\",\"timestampAsOf\":\"dataiwcgcwmshlpq\",\"versionAsOf\":\"dataxhdwjfxopzclka\",\"additionalColumns\":\"datauomga\",\"sourceRetryCount\":\"datac\",\"sourceRetryWait\":\"datajjfmzv\",\"maxConcurrentConnections\":\"databflyzc\",\"disableMetricsCollection\":\"datamlybsy\",\"\":{\"bt\":\"datanvtvbfpuml\"}}") - .toObject(LakeHouseTableSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseTableSource model = new LakeHouseTableSource().withSourceRetryCount("datac") - .withSourceRetryWait("datajjfmzv") - .withMaxConcurrentConnections("databflyzc") - .withDisableMetricsCollection("datamlybsy") - .withTimestampAsOf("dataiwcgcwmshlpq") - .withVersionAsOf("dataxhdwjfxopzclka") - .withAdditionalColumns("datauomga"); - model = BinaryData.fromObject(model).toObject(LakeHouseTableSource.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java deleted file mode 100644 index a02108c9a541..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LakeHouseWriteSettingsTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LakeHouseWriteSettings; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import java.util.Arrays; - -public final class LakeHouseWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LakeHouseWriteSettings model = BinaryData.fromString( - "{\"type\":\"LakeHouseWriteSettings\",\"maxConcurrentConnections\":\"datasgpdbhbdxsjsox\",\"disableMetricsCollection\":\"datawuungdvvddr\",\"copyBehavior\":\"dataquyin\",\"metadata\":[{\"name\":\"datavonwrpel\",\"value\":\"datamitmtkcqixgqxs\"}],\"\":{\"wqumecqyianjmv\":\"datavthuvupdsafqag\"}}") - .toObject(LakeHouseWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LakeHouseWriteSettings model = new LakeHouseWriteSettings().withMaxConcurrentConnections("datasgpdbhbdxsjsox") - .withDisableMetricsCollection("datawuungdvvddr") - .withCopyBehavior("dataquyin") - .withMetadata(Arrays.asList(new MetadataItem().withName("datavonwrpel").withValue("datamitmtkcqixgqxs"))); - model = BinaryData.fromObject(model).toObject(LakeHouseWriteSettings.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java deleted file mode 100644 index 920515582c33..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeRequestTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest; -import org.junit.jupiter.api.Assertions; - -public final class LinkedIntegrationRuntimeRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedIntegrationRuntimeRequest model - = BinaryData.fromString("{\"factoryName\":\"nrjawgqwg\"}").toObject(LinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("nrjawgqwg", model.linkedFactoryName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedIntegrationRuntimeRequest model - = new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("nrjawgqwg"); - model = BinaryData.fromObject(model).toObject(LinkedIntegrationRuntimeRequest.class); - Assertions.assertEquals("nrjawgqwg", model.linkedFactoryName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java deleted file 
mode 100644 index a6749667b506..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedIntegrationRuntimeTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntime; - -public final class LinkedIntegrationRuntimeTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedIntegrationRuntime model = BinaryData.fromString( - "{\"name\":\"sbufnh\",\"subscriptionId\":\"cn\",\"dataFactoryName\":\"oee\",\"dataFactoryLocation\":\"rsljzmzuicsgg\",\"createTime\":\"2021-03-20T13:35:17Z\"}") - .toObject(LinkedIntegrationRuntime.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedIntegrationRuntime model = new LinkedIntegrationRuntime(); - model = BinaryData.fromObject(model).toObject(LinkedIntegrationRuntime.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java deleted file mode 100644 index 55996a810d61..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceDebugResourceTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LinkedServiceDebugResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedServiceDebugResource model = BinaryData.fromString( - "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"ffm\",\"parameters\":{\"dby\":\"datawfbkgozxwo\",\"zqaclna\":\"datap\"}},\"description\":\"biygnugjknfsmfct\",\"parameters\":{\"jhvsujztczyt\":{\"type\":\"Float\",\"defaultValue\":\"datayilflqoiquvrehmr\"},\"auunfprnjletlx\":{\"type\":\"Bool\",\"defaultValue\":\"dataw\"},\"nlqwzdvpiwhx\":{\"type\":\"Object\",\"defaultValue\":\"datapddouifamowaziyn\"},\"quhuxylrj\":{\"type\":\"SecureString\",\"defaultValue\":\"datadtmaa\"}},\"annotations\":[\"dataygjbmzyospspsh\"],\"\":{\"df\":\"datakyjpmspbps\",\"vczkcnyxrxmunjd\":\"datapyogtieyuj\",\"nkvxlxpaglqi\":\"datavg\",\"khpzvuqdflv\":\"databgkc\"}},\"name\":\"iypfp\"}") - .toObject(LinkedServiceDebugResource.class); - Assertions.assertEquals("iypfp", model.name()); - Assertions.assertEquals("ffm", model.properties().connectVia().referenceName()); - Assertions.assertEquals("biygnugjknfsmfct", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("jhvsujztczyt").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedServiceDebugResource model = new 
LinkedServiceDebugResource().withName("iypfp") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("ffm") - .withParameters(mapOf("dby", "datawfbkgozxwo", "zqaclna", "datap"))) - .withDescription("biygnugjknfsmfct") - .withParameters(mapOf("jhvsujztczyt", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datayilflqoiquvrehmr"), - "auunfprnjletlx", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataw"), "nlqwzdvpiwhx", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("datapddouifamowaziyn"), - "quhuxylrj", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datadtmaa"))) - .withAnnotations(Arrays.asList("dataygjbmzyospspsh")) - .withAdditionalProperties(mapOf("type", "LinkedService"))); - model = BinaryData.fromObject(model).toObject(LinkedServiceDebugResource.class); - Assertions.assertEquals("iypfp", model.name()); - Assertions.assertEquals("ffm", model.properties().connectVia().referenceName()); - Assertions.assertEquals("biygnugjknfsmfct", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("jhvsujztczyt").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java deleted file mode 100644 index b72bc665d414..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceListResponseTests.java +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.LinkedServiceResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.LinkedServiceListResponse; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LinkedServiceListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedServiceListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"o\",\"parameters\":{\"ggdtpnapnyiro\":\"datanxknalaulp\",\"ylgqgitxmedjvcsl\":\"datauhpigvp\",\"wwncwzzhxgk\":\"datan\",\"t\":\"datarmgucnap\"}},\"description\":\"ellwptfdy\",\"parameters\":{\"opppcqeq\":{\"type\":\"Object\",\"defaultValue\":\"datauaceopzfqrhhu\"},\"ahzxctobgbk\":{\"type\":\"String\",\"defaultValue\":\"dataz\"},\"grcfb\":{\"type\":\"String\",\"defaultValue\":\"dataizpost\"}},\"annotations\":[\"datamfqjhhkxbp\",\"datajy\",\"datajhxxjyn\",\"datau\"],\"\":{\"szjfauvjfdxxivet\":\"datakrtswbxqz\"}},\"name\":\"cqaqtdoqmcbx\",\"type\":\"vxysl\",\"etag\":\"hsfxoblytkb\",\"id\":\"pe\"},{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"wfbkrvrns\",\"parameters\":{\"ohxcrsbfova\":\"dataq\",\"sub\":\"datarruvwbhsq\",\"rxbpyb\":\"datagjb\",\"twss\":\"datarfbjf\"}},\"description\":\"ftpvjzbexil\",\"parameters\":{\"vwpm\":{\"type\":\"SecureString\",\"defaultValue\":\"dataq\"},\"jhwqytjrybnw\":{\"type\":\"String\",\"defaultValue\":\"dataruoujmk\"}},\"annotations\":[\"datagdrjervnaenqpe\",\"dataindoygmifthnzd\",\"datadslgnayqigynduh\",\"datavhqlkthumaqo\"],\"\":{\"gccymvaolpssl\":\"dataycduier\",\"d\":\"datalfmmdnbbglzpswi\"}},\"name\":\"wyhzdx\",\"type\":\"adbzmnvdfznud\",\"etag\":\"dvxzbncblylpst\",\"id\":\"hh\"}],\"nextLink\":\"rzdzucerscdnt\"}") - .toObject(LinkedServiceListResponse.class); - Assertions.assertEquals("pe", model.value().get(0).id()); - Assertions.assertEquals("o", model.value().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("ellwptfdy", model.value().get(0).properties().description()); - Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("opppcqeq").type()); - Assertions.assertEquals("rzdzucerscdnt", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedServiceListResponse model = new 
LinkedServiceListResponse().withValue(Arrays.asList( - new LinkedServiceResourceInner().withId("pe") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("o") - .withParameters(mapOf("ggdtpnapnyiro", "datanxknalaulp", "ylgqgitxmedjvcsl", "datauhpigvp", - "wwncwzzhxgk", "datan", "t", "datarmgucnap"))) - .withDescription("ellwptfdy") - .withParameters(mapOf("opppcqeq", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("datauaceopzfqrhhu"), - "ahzxctobgbk", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataz"), "grcfb", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataizpost"))) - .withAnnotations(Arrays.asList("datamfqjhhkxbp", "datajy", "datajhxxjyn", "datau")) - .withAdditionalProperties(mapOf("type", "LinkedService"))), - new LinkedServiceResourceInner().withId("hh") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("wfbkrvrns") - .withParameters(mapOf("ohxcrsbfova", "dataq", "sub", "datarruvwbhsq", "rxbpyb", "datagjb", - "twss", "datarfbjf"))) - .withDescription("ftpvjzbexil") - .withParameters(mapOf("vwpm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataq"), - "jhwqytjrybnw", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataruoujmk"))) - .withAnnotations(Arrays.asList("datagdrjervnaenqpe", "dataindoygmifthnzd", "datadslgnayqigynduh", - "datavhqlkthumaqo")) - .withAdditionalProperties(mapOf("type", "LinkedService"))))) - .withNextLink("rzdzucerscdnt"); - model = BinaryData.fromObject(model).toObject(LinkedServiceListResponse.class); - Assertions.assertEquals("pe", model.value().get(0).id()); - Assertions.assertEquals("o", model.value().get(0).properties().connectVia().referenceName()); - Assertions.assertEquals("ellwptfdy", model.value().get(0).properties().description()); - 
Assertions.assertEquals(ParameterType.OBJECT, - model.value().get(0).properties().parameters().get("opppcqeq").type()); - Assertions.assertEquals("rzdzucerscdnt", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java deleted file mode 100644 index 61a0ca3e00a5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceReferenceTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LinkedServiceReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedServiceReference model = BinaryData.fromString( - "{\"referenceName\":\"niodkooeb\",\"parameters\":{\"vdkcrodtj\":\"datajhemms\",\"lfoakg\":\"datanfwjlfltkacjvefk\",\"pulpqblylsyxk\":\"datakfpagao\",\"zuempsbzkf\":\"datajnsjervtiagxsd\"}}") - .toObject(LinkedServiceReference.class); - Assertions.assertEquals("niodkooeb", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedServiceReference model = new LinkedServiceReference().withReferenceName("niodkooeb") - .withParameters(mapOf("vdkcrodtj", "datajhemms", "lfoakg", "datanfwjlfltkacjvefk", "pulpqblylsyxk", - "datakfpagao", "zuempsbzkf", "datajnsjervtiagxsd")); - model = BinaryData.fromObject(model).toObject(LinkedServiceReference.class); - Assertions.assertEquals("niodkooeb", model.referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java deleted file mode 100644 index 7082db494fb1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceResourceInnerTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.LinkedServiceResourceInner; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LinkedServiceResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedServiceResourceInner model = BinaryData.fromString( - 
"{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"vfiwjmygtdss\",\"parameters\":{\"emwabnet\":\"datatmweriofzpyq\",\"d\":\"datahhszh\"}},\"description\":\"vwiwubmwmbesld\",\"parameters\":{\"flcxoga\":{\"type\":\"Float\",\"defaultValue\":\"datapp\"},\"qzeqqkdltfzxm\":{\"type\":\"SecureString\",\"defaultValue\":\"datanzmnsikvm\"}},\"annotations\":[\"datahgure\"],\"\":{\"xwak\":\"datawobdagxtibqdx\",\"lbpodxunk\":\"dataogqxndlkzgxhuri\",\"lrb\":\"dataebxmubyynt\"}},\"name\":\"koievseo\",\"type\":\"q\",\"etag\":\"ltmuwlauwzizx\",\"id\":\"pgcjefuzmuvp\"}") - .toObject(LinkedServiceResourceInner.class); - Assertions.assertEquals("pgcjefuzmuvp", model.id()); - Assertions.assertEquals("vfiwjmygtdss", model.properties().connectVia().referenceName()); - Assertions.assertEquals("vwiwubmwmbesld", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("flcxoga").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedServiceResourceInner model = new LinkedServiceResourceInner().withId("pgcjefuzmuvp") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("vfiwjmygtdss") - .withParameters(mapOf("emwabnet", "datatmweriofzpyq", "d", "datahhszh"))) - .withDescription("vwiwubmwmbesld") - .withParameters(mapOf("flcxoga", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapp"), - "qzeqqkdltfzxm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datanzmnsikvm"))) - .withAnnotations(Arrays.asList("datahgure")) - .withAdditionalProperties(mapOf("type", "LinkedService"))); - model = BinaryData.fromObject(model).toObject(LinkedServiceResourceInner.class); - Assertions.assertEquals("pgcjefuzmuvp", model.id()); - Assertions.assertEquals("vfiwjmygtdss", model.properties().connectVia().referenceName()); - 
Assertions.assertEquals("vwiwubmwmbesld", model.properties().description()); - Assertions.assertEquals(ParameterType.FLOAT, model.properties().parameters().get("flcxoga").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java deleted file mode 100644 index 8109b0a32acd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServiceTests.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LinkedService model = BinaryData.fromString( - "{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"tdum\",\"parameters\":{\"hjpglkf\":\"datapxebmnzbt\"}},\"description\":\"hdneuelfph\",\"parameters\":{\"uvxzxclvi\":{\"type\":\"Array\",\"defaultValue\":\"dataozfikdowwq\"},\"dsjnka\":{\"type\":\"String\",\"defaultValue\":\"dataqzonosggbhcohf\"},\"k\":{\"type\":\"String\",\"defaultValue\":\"datatiiswacffg\"},\"ppfufl\":{\"type\":\"Bool\",\"defaultValue\":\"datawkfvhqcrailvp\"}},\"annotations\":[\"datamh\",\"datalxyjr\",\"datasag\"],\"\":{\"bcvkcvqvpkeq\":\"datanihgwqapnedg\",\"obzdopcjwvnhdl\":\"datacvdrhvoodsot\",\"mutwuoe\":\"datawmgxcxrsl\",\"yqsluic\":\"datarpkhjwn\"}}") - .toObject(LinkedService.class); - Assertions.assertEquals("tdum", model.connectVia().referenceName()); - Assertions.assertEquals("hdneuelfph", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("uvxzxclvi").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LinkedService model = new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("tdum") - .withParameters(mapOf("hjpglkf", "datapxebmnzbt"))) - .withDescription("hdneuelfph") - .withParameters(mapOf("uvxzxclvi", - new 
ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataozfikdowwq"), "dsjnka", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataqzonosggbhcohf"), "k", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatiiswacffg"), - "ppfufl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawkfvhqcrailvp"))) - .withAnnotations(Arrays.asList("datamh", "datalxyjr", "datasag")) - .withAdditionalProperties(mapOf("type", "LinkedService")); - model = BinaryData.fromObject(model).toObject(LinkedService.class); - Assertions.assertEquals("tdum", model.connectVia().referenceName()); - Assertions.assertEquals("hdneuelfph", model.description()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("uvxzxclvi").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index c25a9486c4d9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.LinkedService; -import com.azure.resourcemanager.datafactory.models.LinkedServiceResource; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class LinkedServicesCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"vbe\",\"parameters\":{\"zvpgttvykz\":\"datatk\",\"ikhbkcvpubvmsz\":\"datalktenbvpadoseqc\",\"sxncykfq\":\"datazrs\",\"gqctrvfpg\":\"databwes\"}},\"description\":\"l\",\"parameters\":{\"gthppoddnwhaokk\":{\"type\":\"Int\",\"defaultValue\":\"datavlgo\"},\"rqcga\":{\"type\":\"Object\",\"defaultValue\":\"datavimstbyaklfvc\"},\"mnfvbfjkvspxxbfq\":{\"type\":\"Object\",\"defaultValue\":\"dataofy\"}},\"annotations\":[\"datawjiuiryjdwdaocwq\",\"dataxwoqh\"],\"\":{\"hrencxo\":\"dataojiqtpbfcv\",\"cctuxxytmxjpku\":\"datatsdgnhlp\",\"yjnrjrtnk\":\"dataiafgbfkmqhzjsh\",\"w\":\"dataleurjynezp\"}},\"name\":\"cdvwnpt\",\"type\":\"iqeaugidsz\",\"etag\":\"tqsrtzgvwhjfu\",\"id\":\"pstvcqhzejbr\"}"; - - HttpClient httpClient - = response -> 
Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - LinkedServiceResource response - = manager.linkedServices() - .define("gwriyxyelzm") - .withExistingFactory("uuubtfxjpgjaynof", "yzpnta") - .withProperties(new LinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("harucpkpm") - .withParameters(mapOf("ohkrqbgxhjbap", "datanwobkfu", "btlmpdrkgtlr", "datalluyhivlswipob"))) - .withDescription("msaujxaogtwxgsw") - .withParameters(mapOf("ifs", - new ParameterSpecification().withType(ParameterType.BOOL) - .withDefaultValue("datawfaehryordinfwn"), - "ekfbufty", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataufuddtub"))) - .withAnnotations(Arrays.asList("datalzlsskphwwnnj")) - .withAdditionalProperties(mapOf("type", "LinkedService"))) - .withIfMatch("yi") - .create(); - - Assertions.assertEquals("pstvcqhzejbr", response.id()); - Assertions.assertEquals("vbe", response.properties().connectVia().referenceName()); - Assertions.assertEquals("l", response.properties().description()); - Assertions.assertEquals(ParameterType.INT, response.properties().parameters().get("gthppoddnwhaokk").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java deleted file mode 100644 index 2f9bdfd5186a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class LinkedServicesDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", 
AzureEnvironment.AZURE)); - - manager.linkedServices() - .deleteWithResponse("lwcdc", "mlbzcikh", "pdohvwyitcgy", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java deleted file mode 100644 index c1f412fbac96..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesGetWithResponseMockTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.LinkedServiceResource; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class LinkedServicesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"xyr\",\"parameters\":{\"zlkugkgnuhxa\":\"datalvpnoayckzshvca\",\"aps\":\"dataqposnnwnzxikvjev\",\"quxquypcn\":\"datapjh\",\"tkdeyuowdpnaohhe\":\"dataocusyqailqtq\"}},\"description\":\"yrkbsrpruoyjbzyl\",\"parameters\":{\"jbi\":{\"type\":\"Array\",\"defaultValue\":\"dataeuhssrdugaxkyge\"},\"jcqqv\":{\"type\":\"Float\",\"defaultValue\":\"datazkygh\"},\"zcbrxsqode\":{\"type\":\"Float\",\"defaultValue\":\"datayaec\"}},\"annotations\":[\"datajpeeqy\",\"datangcv\",\"datahvbczcsspn\"],\"\":{\"zfpvi\":\"datakjhzgm\",\"qsuokyvrzlu\":\"dataofvzeihlubdjd\",\"nlfyddyykfdlhytc\":\"datay\",\"nmzlsgal\":\"datapfgjzrdgnl\"}},\"name\":\"xyovwuhvpipaa\",\"type\":\"tkmzord\",\"etag\":\"hwipihoxpeyixbrs\",\"id\":\"rj\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - LinkedServiceResource response = manager.linkedServices() - .getWithResponse("xicjl", "lvqbavpl", "kcsrbvvniwqpcq", "oujikvrkpultjc", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("rj", response.id()); - Assertions.assertEquals("xyr", response.properties().connectVia().referenceName()); - Assertions.assertEquals("yrkbsrpruoyjbzyl", response.properties().description()); - Assertions.assertEquals(ParameterType.ARRAY, response.properties().parameters().get("jbi").type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java deleted file mode 100644 index 63a3cef1d810..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LinkedServicesListByFactoryMockTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.LinkedServiceResource; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class LinkedServicesListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"properties\":{\"type\":\"LinkedService\",\"connectVia\":{\"referenceName\":\"sdwcx\",\"parameters\":{\"perbnbsd\":\"dataidyansnunvgqtvg\",\"jgtqqrmi\":\"datavrdsv\",\"irvzbmhmkoxsavzn\":\"datamnmxspz\",\"mebwcuf\":\"datapaw\"}},\"description\":\"qqtpwhicnnan\",\"parameters\":{\"puzxinw\":{\"type\":\"SecureString\",\"defaultValue\":\"databdptmzsdwxls\"},\"rhzd\":{\"type\":\"Object\",\"defaultValue\":\"dataz\"},\"qbwkx\":{\"type\":\"SecureString\",\"defaultValue\":\"dataauhl\"},\"goijhc\":{\"type\":\"Object\",\"defaultValue\":\"dataroewwrhvdwrowec\"}},\"annotations\":[\"dataprviivczupcl\",\"dataitvym\"],\"\":{\"mudyuoholy\":\"datatsnnsxouz\"}},\"name\":\"wszxl\",\"type\":\"rrczhnvtihjt\",\"etag\":\"lnh\",\"id\":\"tuktmrorepbqkmyl\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.linkedServices().listByFactory("sitypashvjr", "niztgaduslnrqy", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("tuktmrorepbqkmyl", response.iterator().next().id()); - Assertions.assertEquals("sdwcx", response.iterator().next().properties().connectVia().referenceName()); - Assertions.assertEquals("qqtpwhicnnan", response.iterator().next().properties().description()); - Assertions.assertEquals(ParameterType.SECURE_STRING, - response.iterator().next().properties().parameters().get("puzxinw").type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java deleted file mode 100644 index 31ad4a1b1da3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogLocationSettingsTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LogLocationSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LogLocationSettings model = BinaryData.fromString( - "{\"linkedServiceName\":{\"referenceName\":\"ocnwnjmiitlamf\",\"parameters\":{\"hjxwwqzsyetbff\":\"datak\"}},\"path\":\"dataqzvwznwcqoapdtj\"}") - .toObject(LogLocationSettings.class); - Assertions.assertEquals("ocnwnjmiitlamf", model.linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LogLocationSettings model = new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ocnwnjmiitlamf") - .withParameters(mapOf("hjxwwqzsyetbff", "datak"))) - .withPath("dataqzvwznwcqoapdtj"); - model = BinaryData.fromObject(model).toObject(LogLocationSettings.class); - Assertions.assertEquals("ocnwnjmiitlamf", model.linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java deleted file mode 100644 index 5ad1126535d4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogSettingsTests.java +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CopyActivityLogSettings; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.LogSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LogSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LogSettings model = BinaryData.fromString( - "{\"enableCopyActivityLog\":\"datar\",\"copyActivityLogSettings\":{\"logLevel\":\"datapfteklgsnpvqcz\",\"enableReliableLogging\":\"dataooa\"},\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"lthowcsuebt\",\"parameters\":{\"exar\":\"dataeuimtxmd\",\"ivftl\":\"dataukoir\",\"p\":\"dataskinmxanjguadh\",\"qjnouuujl\":\"datafxstwaaz\"}},\"path\":\"dataicshmqxgjzs\"}}") - .toObject(LogSettings.class); - Assertions.assertEquals("lthowcsuebt", 
model.logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LogSettings model = new LogSettings().withEnableCopyActivityLog("datar") - .withCopyActivityLogSettings( - new CopyActivityLogSettings().withLogLevel("datapfteklgsnpvqcz").withEnableReliableLogging("dataooa")) - .withLogLocationSettings( - new LogLocationSettings() - .withLinkedServiceName( - new LinkedServiceReference() - .withReferenceName("lthowcsuebt") - .withParameters(mapOf("exar", "dataeuimtxmd", "ivftl", "dataukoir", "p", - "dataskinmxanjguadh", "qjnouuujl", "datafxstwaaz"))) - .withPath("dataicshmqxgjzs")); - model = BinaryData.fromObject(model).toObject(LogSettings.class); - Assertions.assertEquals("lthowcsuebt", model.logLocationSettings().linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java deleted file mode 100644 index acdde432e104..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LogStorageSettingsTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogStorageSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LogStorageSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LogStorageSettings model = BinaryData.fromString( - "{\"linkedServiceName\":{\"referenceName\":\"hzbwrtmjskbienjn\",\"parameters\":{\"yvs\":\"datakmvoun\",\"knox\":\"dataqurrgxq\",\"jbiigxxr\":\"datahedwhmmwbvrp\"}},\"path\":\"datapuzkwigif\",\"logLevel\":\"dataoys\",\"enableReliableLogging\":\"datalpshxjhanskoo\",\"\":{\"qsqvfyokssta\":\"datank\",\"jjltuymna\":\"datajiql\",\"lvccuvcva\":\"dataqhscaand\",\"jgdjvyclas\":\"datalsb\"}}") - .toObject(LogStorageSettings.class); - Assertions.assertEquals("hzbwrtmjskbienjn", model.linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LogStorageSettings model = new LogStorageSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hzbwrtmjskbienjn") - .withParameters(mapOf("yvs", "datakmvoun", "knox", "dataqurrgxq", "jbiigxxr", "datahedwhmmwbvrp"))) - .withPath("datapuzkwigif") - .withLogLevel("dataoys") - .withEnableReliableLogging("datalpshxjhanskoo") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(LogStorageSettings.class); - Assertions.assertEquals("hzbwrtmjskbienjn", model.linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java deleted file mode 100644 index 5685cd611eb9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTests.java +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.CopySource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LookupActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LookupActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LookupActivity model = BinaryData.fromString( - 
"{\"type\":\"Lookup\",\"typeProperties\":{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"datanijxyczzwhwsi\",\"sourceRetryWait\":\"dataqiavoyh\",\"maxConcurrentConnections\":\"datallnu\",\"disableMetricsCollection\":\"datacbb\",\"\":{\"rt\":\"dataoew\"}},\"dataset\":{\"referenceName\":\"ofcurnhu\",\"parameters\":{\"kzhmyayblmcen\":\"datanyuasst\"}},\"firstRowOnly\":\"datatcxam\"},\"linkedServiceName\":{\"referenceName\":\"lxksph\",\"parameters\":{\"t\":\"datab\",\"w\":\"datalfmaj\"}},\"policy\":{\"timeout\":\"datas\",\"retry\":\"datavkq\",\"retryIntervalInSeconds\":856589229,\"secureInput\":false,\"secureOutput\":true,\"\":{\"cffbsnlv\":\"datanhejualugyu\",\"jdmesoxjkp\":\"datassyzwtzdyzufgnns\"}},\"name\":\"bgfhjwchv\",\"description\":\"biouuqox\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"adghlokvisqzmhe\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Succeeded\",\"Failed\"],\"\":{\"ff\":\"datafjgrtkegrtv\",\"bzykk\":\"databvuxpyveavst\",\"jxgzpmwx\":\"datadjkanizd\",\"twwbahivfosbr\":\"datafrm\"}}],\"userProperties\":[{\"name\":\"whlqydhhypu\",\"value\":\"datahucawmhbqjllyzbq\"}],\"\":{\"u\":\"dataxg\",\"bytzh\":\"dataioumgv\",\"ax\":\"dataqvzwummw\"}}") - .toObject(LookupActivity.class); - Assertions.assertEquals("bgfhjwchv", model.name()); - Assertions.assertEquals("biouuqox", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("adghlokvisqzmhe", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("whlqydhhypu", model.userProperties().get(0).name()); - Assertions.assertEquals("lxksph", model.linkedServiceName().referenceName()); - Assertions.assertEquals(856589229, model.policy().retryIntervalInSeconds()); - 
Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("ofcurnhu", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LookupActivity model = new LookupActivity().withName("bgfhjwchv") - .withDescription("biouuqox") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("adghlokvisqzmhe") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("whlqydhhypu").withValue("datahucawmhbqjllyzbq"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lxksph") - .withParameters(mapOf("t", "datab", "w", "datalfmaj"))) - .withPolicy(new ActivityPolicy().withTimeout("datas") - .withRetry("datavkq") - .withRetryIntervalInSeconds(856589229) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withSource(new CopySource().withSourceRetryCount("datanijxyczzwhwsi") - .withSourceRetryWait("dataqiavoyh") - .withMaxConcurrentConnections("datallnu") - .withDisableMetricsCollection("datacbb") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withDataset(new DatasetReference().withReferenceName("ofcurnhu") - .withParameters(mapOf("kzhmyayblmcen", "datanyuasst"))) - .withFirstRowOnly("datatcxam"); - model = BinaryData.fromObject(model).toObject(LookupActivity.class); - Assertions.assertEquals("bgfhjwchv", model.name()); - Assertions.assertEquals("biouuqox", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - 
Assertions.assertEquals("adghlokvisqzmhe", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("whlqydhhypu", model.userProperties().get(0).name()); - Assertions.assertEquals("lxksph", model.linkedServiceName().referenceName()); - Assertions.assertEquals(856589229, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("ofcurnhu", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java deleted file mode 100644 index c1b4a3ba03b2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/LookupActivityTypePropertiesTests.java +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.LookupActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.CopySource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class LookupActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - LookupActivityTypeProperties model = BinaryData.fromString( - "{\"source\":{\"type\":\"CopySource\",\"sourceRetryCount\":\"databihgcduj\",\"sourceRetryWait\":\"dataaiw\",\"maxConcurrentConnections\":\"datarbtrmif\",\"disableMetricsCollection\":\"dataizdukamtfkufvab\",\"\":{\"kozlpsf\":\"dataubyfspliw\"}},\"dataset\":{\"referenceName\":\"ajpyuwrggfgll\",\"parameters\":{\"qqmxkuyyr\":\"datangzvyt\",\"aetgmmfdf\":\"dataqsyqhugj\",\"epwyyeupkpyzaena\":\"dataq\"}},\"firstRowOnly\":\"datayrlqiykhoygfgch\"}") - .toObject(LookupActivityTypeProperties.class); - Assertions.assertEquals("ajpyuwrggfgll", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - LookupActivityTypeProperties model = new LookupActivityTypeProperties() - .withSource(new CopySource().withSourceRetryCount("databihgcduj") - .withSourceRetryWait("dataaiw") - .withMaxConcurrentConnections("datarbtrmif") - .withDisableMetricsCollection("dataizdukamtfkufvab") - .withAdditionalProperties(mapOf("type", "CopySource"))) - .withDataset(new DatasetReference().withReferenceName("ajpyuwrggfgll") - .withParameters( - mapOf("qqmxkuyyr", "datangzvyt", "aetgmmfdf", "dataqsyqhugj", "epwyyeupkpyzaena", "dataq"))) - .withFirstRowOnly("datayrlqiykhoygfgch"); - model = BinaryData.fromObject(model).toObject(LookupActivityTypeProperties.class); - Assertions.assertEquals("ajpyuwrggfgll", 
model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java deleted file mode 100644 index d9be1c57b795..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoObjectDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MagentoObjectDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MagentoObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MagentoObjectDataset model = BinaryData.fromString( - "{\"type\":\"MagentoObject\",\"typeProperties\":{\"tableName\":\"dataf\"},\"description\":\"zgccyn\",\"structure\":\"datavmsiehedm\",\"schema\":\"dataoneeyrxpa\",\"linkedServiceName\":{\"referenceName\":\"xtzayqwddig\",\"parameters\":{\"d\":\"dataspl\"}},\"parameters\":{\"zbjkjqpizd\":{\"type\":\"Object\",\"defaultValue\":\"datavee\"},\"wl\":{\"type\":\"Float\",\"defaultValue\":\"datah\"},\"ywbnk\":{\"type\":\"Int\",\"defaultValue\":\"dataj\"}},\"annotations\":[\"datarxactsawvxcimp\"],\"folder\":{\"name\":\"rm\"},\"\":{\"knyfuysjhvrr\":\"datamslub\"}}") - .toObject(MagentoObjectDataset.class); - Assertions.assertEquals("zgccyn", model.description()); - Assertions.assertEquals("xtzayqwddig", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("zbjkjqpizd").type()); - Assertions.assertEquals("rm", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MagentoObjectDataset model = new MagentoObjectDataset().withDescription("zgccyn") - .withStructure("datavmsiehedm") - .withSchema("dataoneeyrxpa") - .withLinkedServiceName( - new 
LinkedServiceReference().withReferenceName("xtzayqwddig").withParameters(mapOf("d", "dataspl"))) - .withParameters(mapOf("zbjkjqpizd", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datavee"), "wl", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datah"), "ywbnk", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataj"))) - .withAnnotations(Arrays.asList("datarxactsawvxcimp")) - .withFolder(new DatasetFolder().withName("rm")) - .withTableName("dataf"); - model = BinaryData.fromObject(model).toObject(MagentoObjectDataset.class); - Assertions.assertEquals("zgccyn", model.description()); - Assertions.assertEquals("xtzayqwddig", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("zbjkjqpizd").type()); - Assertions.assertEquals("rm", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java deleted file mode 100644 index 964fe099d3f3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MagentoSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MagentoSource; - -public final class MagentoSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MagentoSource model = BinaryData.fromString( - "{\"type\":\"MagentoSource\",\"query\":\"datatxe\",\"queryTimeout\":\"datamimgjuvjvtgece\",\"additionalColumns\":\"datannle\",\"sourceRetryCount\":\"dataoukfjwkctdn\",\"sourceRetryWait\":\"dataokqeuzslny\",\"maxConcurrentConnections\":\"datauywijnlpeczq\",\"disableMetricsCollection\":\"datamzkqydthf\",\"\":{\"rwu\":\"datacmwvp\",\"lekchjdhlskeifw\":\"datanfovylis\",\"ptvbudb\":\"datatcownxiw\"}}") - .toObject(MagentoSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MagentoSource model = new MagentoSource().withSourceRetryCount("dataoukfjwkctdn") - .withSourceRetryWait("dataokqeuzslny") - .withMaxConcurrentConnections("datauywijnlpeczq") - .withDisableMetricsCollection("datamzkqydthf") - .withQueryTimeout("datamimgjuvjvtgece") - .withAdditionalColumns("datannle") - .withQuery("datatxe"); - model = BinaryData.fromObject(model).toObject(MagentoSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java deleted file mode 100644 index c7e5114132e7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityCredentialTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ManagedIdentityCredentialTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedIdentityCredential model = BinaryData.fromString( - "{\"type\":\"ManagedIdentity\",\"typeProperties\":{\"resourceId\":\"rxpxslc\"},\"description\":\"yscjefapouwsyns\",\"annotations\":[\"datad\",\"datardle\",\"datajzv\"],\"\":{\"titktke\":\"datayhggvhcoao\"}}") - .toObject(ManagedIdentityCredential.class); - Assertions.assertEquals("yscjefapouwsyns", model.description()); - Assertions.assertEquals("rxpxslc", model.resourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedIdentityCredential model = new ManagedIdentityCredential().withDescription("yscjefapouwsyns") - .withAnnotations(Arrays.asList("datad", "datardle", "datajzv")) - .withResourceId("rxpxslc"); - model = BinaryData.fromObject(model).toObject(ManagedIdentityCredential.class); - Assertions.assertEquals("yscjefapouwsyns", model.description()); - Assertions.assertEquals("rxpxslc", model.resourceId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java deleted file mode 100644 index 9a056a1498f4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedIdentityTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedIdentityTypeProperties; -import org.junit.jupiter.api.Assertions; - -public final class ManagedIdentityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedIdentityTypeProperties model - = BinaryData.fromString("{\"resourceId\":\"amb\"}").toObject(ManagedIdentityTypeProperties.class); - Assertions.assertEquals("amb", model.resourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedIdentityTypeProperties model = new ManagedIdentityTypeProperties().withResourceId("amb"); - model = BinaryData.fromObject(model).toObject(ManagedIdentityTypeProperties.class); - Assertions.assertEquals("amb", model.resourceId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java deleted file mode 100644 index 710e08e95249..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointListResponseTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedPrivateEndpointResourceInner; -import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointListResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ManagedPrivateEndpointListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedPrivateEndpointListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"wdkqzeqy\",\"description\":\"eziunjxdfzant\",\"status\":\"cegyamlbnseqacj\"},\"fqdns\":[\"ilguooqjagmditg\",\"eiookjbsah\",\"tdtpdelqacslmo\"],\"groupId\":\"ebnfxofvc\",\"isReserved\":false,\"privateLinkResourceId\":\"irazftxejwabmd\",\"provisioningState\":\"tmvcop\",\"\":{\"urbuhhlkyqltq\":\"datam\"}},\"name\":\"ogtu\",\"type\":\"ffdjktsysidfvclg\",\"etag\":\"n\",\"id\":\"ijtk\"},{\"properties\":{\"connectionState\":{\"actionsRequired\":\"qogsfikayian\",\"description\":\"arujt\",\"status\":\"qxfzyjqttvwk\"},\"fqdns\":[\"j\",\"enuygbq\",\"qqekewvnqvcdlgu\"],\"groupId\":\"cmfdjwnlax\",\"isReserved\":false,\"privateLinkResourceId\":\"qikczvvita\",\"provisioningState\":\"xmfcsserxhtv\",\"\":{\"sxypruuu\":\"datahlwntsjgq\"}},\"name\":\"nchrszizoyu\",\"type\":\"yetnd\",\"etag\":\"fqyggagflnlgmtr\",\"id\":\"hzjmucftbyrp\"},{\"properties\":{\"connectionState\":{\"actionsRequired\":\"hkpigqfusuckzmkw\",\"description\":\"snoxaxmqeqa\",\"status\":\"hjnhgwydyynfsvk\"},\"fqdns\":[\"vqtanarfdlpuk\"],\"groupId\":\"yrneizjcpeo\",\"isReserved\":true,\"privateLinkResourceId\":\"mgbro\",\"provisioningState\":\"ddbhf\",\"\":{\"zoyw\":\"datapaz\",\"ht
uevrhrljy\":\"dataxhpdulontacnpqwt\",\"reur\":\"dataogwxhnsduugwb\",\"fuarenlvhht\":\"dataq\"}},\"name\":\"nvnaf\",\"type\":\"kyfede\",\"etag\":\"bo\",\"id\":\"cqxypokkhminq\"}],\"nextLink\":\"mczngn\"}") - .toObject(ManagedPrivateEndpointListResponse.class); - Assertions.assertEquals("ijtk", model.value().get(0).id()); - Assertions.assertEquals("ilguooqjagmditg", model.value().get(0).properties().fqdns().get(0)); - Assertions.assertEquals("ebnfxofvc", model.value().get(0).properties().groupId()); - Assertions.assertEquals("irazftxejwabmd", model.value().get(0).properties().privateLinkResourceId()); - Assertions.assertEquals("mczngn", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedPrivateEndpointListResponse model = new ManagedPrivateEndpointListResponse() - .withValue(Arrays.asList( - new ManagedPrivateEndpointResourceInner().withId("ijtk") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("ilguooqjagmditg", "eiookjbsah", "tdtpdelqacslmo")) - .withGroupId("ebnfxofvc") - .withPrivateLinkResourceId("irazftxejwabmd") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "tmvcop"))), - new ManagedPrivateEndpointResourceInner().withId("hzjmucftbyrp") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("j", "enuygbq", "qqekewvnqvcdlgu")) - .withGroupId("cmfdjwnlax") - .withPrivateLinkResourceId("qikczvvita") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "xmfcsserxhtv"))), - new ManagedPrivateEndpointResourceInner().withId("cqxypokkhminq") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("vqtanarfdlpuk")) - .withGroupId("yrneizjcpeo") - .withPrivateLinkResourceId("mgbro") - .withAdditionalProperties(mapOf("isReserved", true, 
"provisioningState", "ddbhf"))))) - .withNextLink("mczngn"); - model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpointListResponse.class); - Assertions.assertEquals("ijtk", model.value().get(0).id()); - Assertions.assertEquals("ilguooqjagmditg", model.value().get(0).properties().fqdns().get(0)); - Assertions.assertEquals("ebnfxofvc", model.value().get(0).properties().groupId()); - Assertions.assertEquals("irazftxejwabmd", model.value().get(0).properties().privateLinkResourceId()); - Assertions.assertEquals("mczngn", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java deleted file mode 100644 index 16b46d5bad24..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointResourceInnerTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedPrivateEndpointResourceInner; -import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ManagedPrivateEndpointResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedPrivateEndpointResourceInner model = BinaryData.fromString( - "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"x\",\"description\":\"unin\",\"status\":\"db\"},\"fqdns\":[\"qdtvqecrqctmxx\",\"tddmf\",\"huytxzvtzn\",\"pxbannovvoxc\"],\"groupId\":\"tprwnw\",\"isReserved\":true,\"privateLinkResourceId\":\"vytlyokrrrouuxvn\",\"provisioningState\":\"sbcrymodizrxklo\",\"\":{\"lmv\":\"datanazpmk\",\"zxlioh\":\"datavfxzopjh\",\"dtfgxqbawpcbb\":\"datad\"}},\"name\":\"qcy\",\"type\":\"apqofyuicdhz\",\"etag\":\"ybww\",\"id\":\"d\"}") - .toObject(ManagedPrivateEndpointResourceInner.class); - Assertions.assertEquals("d", model.id()); - Assertions.assertEquals("qdtvqecrqctmxx", model.properties().fqdns().get(0)); - Assertions.assertEquals("tprwnw", model.properties().groupId()); - Assertions.assertEquals("vytlyokrrrouuxvn", model.properties().privateLinkResourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedPrivateEndpointResourceInner model = new ManagedPrivateEndpointResourceInner().withId("d") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("qdtvqecrqctmxx", "tddmf", "huytxzvtzn", "pxbannovvoxc")) - .withGroupId("tprwnw") - .withPrivateLinkResourceId("vytlyokrrrouuxvn") - .withAdditionalProperties(mapOf("isReserved", 
true, "provisioningState", "sbcrymodizrxklo"))); - model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpointResourceInner.class); - Assertions.assertEquals("d", model.id()); - Assertions.assertEquals("qdtvqecrqctmxx", model.properties().fqdns().get(0)); - Assertions.assertEquals("tprwnw", model.properties().groupId()); - Assertions.assertEquals("vytlyokrrrouuxvn", model.properties().privateLinkResourceId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java deleted file mode 100644 index 44e087a8a7f5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ManagedPrivateEndpointTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedPrivateEndpoint model = BinaryData.fromString( - "{\"connectionState\":{\"actionsRequired\":\"idmhmwf\",\"description\":\"lfmu\",\"status\":\"pckc\"},\"fqdns\":[\"vwe\"],\"groupId\":\"xoy\",\"isReserved\":false,\"privateLinkResourceId\":\"haim\",\"provisioningState\":\"iroqbosh\",\"\":{\"pavbo\":\"datagapyyrmfsv\"}}") - .toObject(ManagedPrivateEndpoint.class); - Assertions.assertEquals("vwe", model.fqdns().get(0)); - Assertions.assertEquals("xoy", model.groupId()); - Assertions.assertEquals("haim", model.privateLinkResourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedPrivateEndpoint model = new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("vwe")) - .withGroupId("xoy") - .withPrivateLinkResourceId("haim") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "iroqbosh")); - model = BinaryData.fromObject(model).toObject(ManagedPrivateEndpoint.class); - Assertions.assertEquals("vwe", model.fqdns().get(0)); - Assertions.assertEquals("xoy", model.groupId()); - Assertions.assertEquals("haim", model.privateLinkResourceId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index dc8272bbf9c2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ConnectionStateProperties; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedPrivateEndpointsCreateOrUpdateWithResponseMockTests { - @Test - public void 
testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"connectionState\":{\"actionsRequired\":\"hdr\",\"description\":\"aqtrcwzmdencq\",\"status\":\"sqnliougavudmxct\"},\"fqdns\":[\"zrkcvbfeucd\",\"ejazhtetimcjk\",\"exxn\"],\"groupId\":\"lcekonmcxriqfrrx\",\"isReserved\":true,\"privateLinkResourceId\":\"rhcjhsz\",\"provisioningState\":\"mfriosm\",\"\":{\"yixbbhj\":\"datakizqqdawmrk\",\"zdjzhxwobxso\":\"datanj\",\"odrtk\":\"dataemawrnq\"}},\"name\":\"mgllnyohnhfup\",\"type\":\"fosrwzhmlklocyjp\",\"etag\":\"tnvxomhk\",\"id\":\"vdmjjiqjv\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedPrivateEndpointResource response = manager.managedPrivateEndpoints() - .define("elupsobq") - .withExistingManagedVirtualNetwork("ddadez", "saecdc", "hxwegdsmnyphv") - .withProperties(new ManagedPrivateEndpoint().withConnectionState(new ConnectionStateProperties()) - .withFqdns(Arrays.asList("b")) - .withGroupId("yedrkgrtda") - .withPrivateLinkResourceId("oimtar") - .withAdditionalProperties(mapOf("isReserved", false, "provisioningState", "exkbmod"))) - .withIfMatch("mblhcbanzkw") - .create(); - - Assertions.assertEquals("vdmjjiqjv", response.id()); - Assertions.assertEquals("zrkcvbfeucd", response.properties().fqdns().get(0)); - Assertions.assertEquals("lcekonmcxriqfrrx", response.properties().groupId()); - Assertions.assertEquals("rhcjhsz", response.properties().privateLinkResourceId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java deleted file mode 100644 index 3cd578542950..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedPrivateEndpointsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", 
OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.managedPrivateEndpoints() - .deleteWithResponse("qnsigrqcxhwvzd", "ujmuka", "zu", "txf", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java deleted file mode 100644 index 3049f044d56f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsGetWithResponseMockTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedPrivateEndpointsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"connectionState\":{\"actionsRequired\":\"forxakpmzkdisr\",\"description\":\"krcjvoivnfdovw\",\"status\":\"eguvip\"},\"fqdns\":[\"inwrhfrbwoylpme\",\"cbblg\"],\"groupId\":\"e\",\"isReserved\":true,\"privateLinkResourceId\":\"rxuyorh\",\"provisioningState\":\"ihzwdoflwlm\",\"\":{\"vkuuyehmnvfhyiax\":\"datavmfosfpgqux\"}},\"name\":\"pwhczqjoovyps\",\"type\":\"ghwokbwzpxlx\",\"etag\":\"hhkabeo\",\"id\":\"qoetckm\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedPrivateEndpointResource response = manager.managedPrivateEndpoints() - .getWithResponse("qdtcibb", "ijkwzjlk", "moexughztr", "timtf", "uylqpzskngfcbl", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("qoetckm", response.id()); - Assertions.assertEquals("inwrhfrbwoylpme", response.properties().fqdns().get(0)); - Assertions.assertEquals("e", response.properties().groupId()); - Assertions.assertEquals("rxuyorh", response.properties().privateLinkResourceId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java deleted file mode 100644 index 8005f96d7c4c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedPrivateEndpointsListByFactoryMockTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpointResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedPrivateEndpointsListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"connectionState\":{\"actionsRequired\":\"ngxbkcir\",\"description\":\"ttzyruvgihwiezcf\",\"status\":\"jdplkuyo\"},\"fqdns\":[\"ftdljbjp\",\"vlgahpztvl\",\"ffymrzoupipdkgpt\",\"mym\"],\"groupId\":\"rtpznychwbzrbqpz\",\"isReserved\":false,\"privateLinkResourceId\":\"hbf\",\"provisioningState\":\"fytwrnvwaxmey\",\"\":{\"q\":\"datafqmhcqv\",\"ufuvt\":\"datazajdxmaim\",\"ieknxbcgnphe\":\"datapvwfll\",\"kekx\":\"datagkw\"}},\"name\":\"qvqpwz\",\"type\":\"tvmkjleela\",\"etag\":\"hgefvp\",\"id\":\"yvbyagqipr\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.managedPrivateEndpoints() - .listByFactory("jpy", "alt", "rhiinata", 
com.azure.core.util.Context.NONE); - - Assertions.assertEquals("yvbyagqipr", response.iterator().next().id()); - Assertions.assertEquals("ftdljbjp", response.iterator().next().properties().fqdns().get(0)); - Assertions.assertEquals("rtpznychwbzrbqpz", response.iterator().next().properties().groupId()); - Assertions.assertEquals("hbf", response.iterator().next().properties().privateLinkResourceId()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java deleted file mode 100644 index 84ffd864f74c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkListResponseTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedVirtualNetworkResourceInner; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkListResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ManagedVirtualNetworkListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedVirtualNetworkListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"vNetId\":\"o\",\"alias\":\"vmfqhppubo\",\"\":{\"juahokqto\":\"datapdfgkmtdherngbt\",\"hfphwpnulaiywze\":\"datakauxof\",\"wrpqafgfugsnnf\":\"dataywhslwkojpllndnp\",\"coc\":\"datayetefyp\"}},\"name\":\"jgtixr\",\"type\":\"zuyt\",\"etag\":\"mlmuowol\",\"id\":\"uir\"}],\"nextLink\":\"ionszonwp\"}") - .toObject(ManagedVirtualNetworkListResponse.class); - Assertions.assertEquals("uir", model.value().get(0).id()); - Assertions.assertEquals("ionszonwp", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedVirtualNetworkListResponse model - = new ManagedVirtualNetworkListResponse() - .withValue(Arrays.asList(new ManagedVirtualNetworkResourceInner().withId("uir") - .withProperties(new ManagedVirtualNetwork() - .withAdditionalProperties(mapOf("vNetId", "o", "alias", "vmfqhppubo"))))) - .withNextLink("ionszonwp"); - model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkListResponse.class); - Assertions.assertEquals("uir", model.value().get(0).id()); - Assertions.assertEquals("ionszonwp", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java deleted file mode 100644 index 6f1db97a0327..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkReferenceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkReference; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class ManagedVirtualNetworkReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedVirtualNetworkReference model - = BinaryData.fromString("{\"type\":\"ManagedVirtualNetworkReference\",\"referenceName\":\"epkpe\"}") - .toObject(ManagedVirtualNetworkReference.class); - Assertions.assertEquals(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE, model.type()); - Assertions.assertEquals("epkpe", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedVirtualNetworkReference model = new ManagedVirtualNetworkReference() - .withType(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE) - 
.withReferenceName("epkpe"); - model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkReference.class); - Assertions.assertEquals(ManagedVirtualNetworkReferenceType.MANAGED_VIRTUAL_NETWORK_REFERENCE, model.type()); - Assertions.assertEquals("epkpe", model.referenceName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java deleted file mode 100644 index 9a3679e5710b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkResourceInnerTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ManagedVirtualNetworkResourceInner; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ManagedVirtualNetworkResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedVirtualNetworkResourceInner model = BinaryData.fromString( - "{\"properties\":{\"vNetId\":\"ajinnixjawrtmjfj\",\"alias\":\"ccxlzhcoxovnek\",\"\":{\"jvidttge\":\"datalusfnrdtjxtxrdcq\",\"iesfuug\":\"datauslvyjtcvuwkasi\"}},\"name\":\"uqfecj\",\"type\":\"ygtuhx\",\"etag\":\"cbuewmrswnjlxuz\",\"id\":\"wpusxjbaqehg\"}") - .toObject(ManagedVirtualNetworkResourceInner.class); - Assertions.assertEquals("wpusxjbaqehg", model.id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedVirtualNetworkResourceInner model = new ManagedVirtualNetworkResourceInner().withId("wpusxjbaqehg") - .withProperties(new ManagedVirtualNetwork() - .withAdditionalProperties(mapOf("vNetId", "ajinnixjawrtmjfj", "alias", "ccxlzhcoxovnek"))); - model = BinaryData.fromObject(model).toObject(ManagedVirtualNetworkResourceInner.class); - Assertions.assertEquals("wpusxjbaqehg", model.id()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java deleted file mode 100644 index 39ee62f232f4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworkTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; -import java.util.HashMap; -import java.util.Map; - -public final class ManagedVirtualNetworkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ManagedVirtualNetwork model = BinaryData.fromString( - "{\"vNetId\":\"ohzjqatucoigeb\",\"alias\":\"cnwfepbnwgfmxjg\",\"\":{\"qbctqha\":\"datajbgdlfgtdysnaquf\"}}") - .toObject(ManagedVirtualNetwork.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ManagedVirtualNetwork model = new ManagedVirtualNetwork() - .withAdditionalProperties(mapOf("vNetId", "ohzjqatucoigeb", "alias", "cnwfepbnwgfmxjg")); - model = BinaryData.fromObject(model).toObject(ManagedVirtualNetwork.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 3c33930b4a15..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedVirtualNetworksCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"vNetId\":\"wolzuk\",\"alias\":\"qlszlymy\",\"\":{\"qmi\":\"datajbu\",\"cvrfqqmbuvti\":\"dataukwmzm\"}},\"name\":\"mcymwr\",\"type\":\"kaztuldg\",\"etag\":\"edvxhqhptn\",\"id\":\"pafurttshr\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedVirtualNetworkResource response = manager.managedVirtualNetworks() - .define("sta") - .withExistingFactory("ycjuxabpuphg", "gmggkkjciz") - .withProperties( - new ManagedVirtualNetwork().withAdditionalProperties(mapOf("vNetId", "mjpgzwtlupz", "alias", "gsidk"))) - .withIfMatch("eyfkrcmx") - .create(); - - Assertions.assertEquals("pafurttshr", response.id()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java deleted file mode 100644 index 3cbddafddc90..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksGetWithResponseMockTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedVirtualNetworksGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"vNetId\":\"aeaugebqhbb\",\"alias\":\"dyvvpcoiaaa\",\"\":{\"p\":\"dataecwwdqgooabhfrg\"}},\"name\":\"fh\",\"type\":\"rfevwcxzxvgf\",\"etag\":\"ckqwqujpugjsjal\",\"id\":\"bypvpds\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ManagedVirtualNetworkResource response = manager.managedVirtualNetworks() - .getWithResponse("jfllqmuzeolcgqj", "vpalkmwvgdfu", "dswjtuqw", "jpauic", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("bypvpds", response.id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java deleted file mode 100644 index 7da771d79e90..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ManagedVirtualNetworksListByFactoryMockTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetworkResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class ManagedVirtualNetworksListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"vNetId\":\"z\",\"alias\":\"eghmfmuxd\",\"\":{\"zle\":\"datagdcszzzedf\"}},\"name\":\"vldeqmfz\",\"type\":\"ikhnwseftlj\",\"etag\":\"fpfkdybezaxith\",\"id\":\"jxtobeqgzcadoq\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new 
AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.managedVirtualNetworks().listByFactory("nlmxzdw", "dwbnou", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("jxtobeqgzcadoq", response.iterator().next().id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java deleted file mode 100644 index 28c045046ab5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingTests.java +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeReference; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MappingType; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class MapperAttributeMappingTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperAttributeMapping model = BinaryData.fromString( - 
"{\"name\":\"ehpavawugiqjtiog\",\"type\":\"Aggregate\",\"functionName\":\"inic\",\"expression\":\"eajohiyg\",\"attributeReference\":{\"name\":\"bonhpczykmktp\",\"entity\":\"xqcsehch\",\"entityConnectionReference\":{\"connectionName\":\"fmpqumqyjg\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"dsaeuzanhsfnh\",\"entity\":\"nwph\",\"entityConnectionReference\":{\"connectionName\":\"ngqjclidf\",\"type\":\"linkedservicetype\"}}]}") - .toObject(MapperAttributeMapping.class); - Assertions.assertEquals("ehpavawugiqjtiog", model.name()); - Assertions.assertEquals(MappingType.AGGREGATE, model.type()); - Assertions.assertEquals("inic", model.functionName()); - Assertions.assertEquals("eajohiyg", model.expression()); - Assertions.assertEquals("bonhpczykmktp", model.attributeReference().name()); - Assertions.assertEquals("xqcsehch", model.attributeReference().entity()); - Assertions.assertEquals("fmpqumqyjg", model.attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("dsaeuzanhsfnh", model.attributeReferences().get(0).name()); - Assertions.assertEquals("nwph", model.attributeReferences().get(0).entity()); - Assertions.assertEquals("ngqjclidf", - model.attributeReferences().get(0).entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeReferences().get(0).entityConnectionReference().type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperAttributeMapping model = new MapperAttributeMapping().withName("ehpavawugiqjtiog") - .withType(MappingType.AGGREGATE) - .withFunctionName("inic") - .withExpression("eajohiyg") - .withAttributeReference(new MapperAttributeReference().withName("bonhpczykmktp") - .withEntity("xqcsehch") - .withEntityConnectionReference(new 
MapperConnectionReference().withConnectionName("fmpqumqyjg") - .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference().withName("dsaeuzanhsfnh") - .withEntity("nwph") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("ngqjclidf") - .withType(ConnectionType.LINKEDSERVICETYPE)))); - model = BinaryData.fromObject(model).toObject(MapperAttributeMapping.class); - Assertions.assertEquals("ehpavawugiqjtiog", model.name()); - Assertions.assertEquals(MappingType.AGGREGATE, model.type()); - Assertions.assertEquals("inic", model.functionName()); - Assertions.assertEquals("eajohiyg", model.expression()); - Assertions.assertEquals("bonhpczykmktp", model.attributeReference().name()); - Assertions.assertEquals("xqcsehch", model.attributeReference().entity()); - Assertions.assertEquals("fmpqumqyjg", model.attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("dsaeuzanhsfnh", model.attributeReferences().get(0).name()); - Assertions.assertEquals("nwph", model.attributeReferences().get(0).entity()); - Assertions.assertEquals("ngqjclidf", - model.attributeReferences().get(0).entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeReferences().get(0).entityConnectionReference().type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java deleted file mode 100644 index 647220577839..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeMappingsTests.java +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMappings; -import com.azure.resourcemanager.datafactory.models.MapperAttributeReference; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MappingType; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class MapperAttributeMappingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperAttributeMappings model = BinaryData.fromString( - 
"{\"attributeMappings\":[{\"name\":\"qumoeno\",\"type\":\"Direct\",\"functionName\":\"enhqhskndnelq\",\"expression\":\"adlknwfoanni\",\"attributeReference\":{\"name\":\"etxivcnrlyxnuc\",\"entity\":\"p\",\"entityConnectionReference\":{\"connectionName\":\"kwqpat\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"cjbctviv\",\"entity\":\"qymtuowogtgits\",\"entityConnectionReference\":{\"connectionName\":\"vbrzcdb\",\"type\":\"linkedservicetype\"}},{\"name\":\"ndscxmxeatk\",\"entity\":\"mwnrdj\",\"entityConnectionReference\":{\"connectionName\":\"q\",\"type\":\"linkedservicetype\"}}]},{\"name\":\"mhjrm\",\"type\":\"Aggregate\",\"functionName\":\"axljal\",\"expression\":\"hcjmo\",\"attributeReference\":{\"name\":\"nc\",\"entity\":\"xxqcwgaxf\",\"entityConnectionReference\":{\"connectionName\":\"knokzw\",\"type\":\"linkedservicetype\"}},\"attributeReferences\":[{\"name\":\"ixldzyyfy\",\"entity\":\"qsix\",\"entityConnectionReference\":{\"connectionName\":\"puj\",\"type\":\"linkedservicetype\"}},{\"name\":\"lkjuvsmbmslzoyov\",\"entity\":\"dbpqvybefgvm\",\"entityConnectionReference\":{\"connectionName\":\"kcvtl\",\"type\":\"linkedservicetype\"}},{\"name\":\"skvcuartrhunl\",\"entity\":\"rykycndzfqivjr\",\"entityConnectionReference\":{\"connectionName\":\"kbbm\",\"type\":\"linkedservicetype\"}},{\"name\":\"ltbxoeeonqlnfw\",\"entity\":\"ymvqdbpbhfckdvez\",\"entityConnectionReference\":{\"connectionName\":\"ssbzhddubbnqfbl\",\"type\":\"linkedservicetype\"}}]}]}") - .toObject(MapperAttributeMappings.class); - Assertions.assertEquals("qumoeno", model.attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.DIRECT, model.attributeMappings().get(0).type()); - Assertions.assertEquals("enhqhskndnelq", model.attributeMappings().get(0).functionName()); - Assertions.assertEquals("adlknwfoanni", model.attributeMappings().get(0).expression()); - Assertions.assertEquals("etxivcnrlyxnuc", model.attributeMappings().get(0).attributeReference().name()); - 
Assertions.assertEquals("p", model.attributeMappings().get(0).attributeReference().entity()); - Assertions.assertEquals("kwqpat", - model.attributeMappings().get(0).attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappings().get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("cjbctviv", model.attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("qymtuowogtgits", - model.attributeMappings().get(0).attributeReferences().get(0).entity()); - Assertions.assertEquals("vbrzcdb", - model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperAttributeMappings model = new MapperAttributeMappings().withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("qumoeno") - .withType(MappingType.DIRECT) - .withFunctionName("enhqhskndnelq") - .withExpression("adlknwfoanni") - .withAttributeReference(new MapperAttributeReference().withName("etxivcnrlyxnuc") - .withEntity("p") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("kwqpat") - .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("cjbctviv") - .withEntity("qymtuowogtgits") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("vbrzcdb") - .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("ndscxmxeatk") - .withEntity("mwnrdj") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("q") - .withType(ConnectionType.LINKEDSERVICETYPE)))), 
- new MapperAttributeMapping().withName("mhjrm") - .withType(MappingType.AGGREGATE) - .withFunctionName("axljal") - .withExpression("hcjmo") - .withAttributeReference(new MapperAttributeReference().withName("nc") - .withEntity("xxqcwgaxf") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("knokzw") - .withType(ConnectionType.LINKEDSERVICETYPE))) - .withAttributeReferences(Arrays.asList( - new MapperAttributeReference().withName("ixldzyyfy") - .withEntity("qsix") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("puj") - .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("lkjuvsmbmslzoyov") - .withEntity("dbpqvybefgvm") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("kcvtl") - .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("skvcuartrhunl") - .withEntity("rykycndzfqivjr") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("kbbm") - .withType(ConnectionType.LINKEDSERVICETYPE)), - new MapperAttributeReference().withName("ltbxoeeonqlnfw") - .withEntity("ymvqdbpbhfckdvez") - .withEntityConnectionReference( - new MapperConnectionReference().withConnectionName("ssbzhddubbnqfbl") - .withType(ConnectionType.LINKEDSERVICETYPE)))))); - model = BinaryData.fromObject(model).toObject(MapperAttributeMappings.class); - Assertions.assertEquals("qumoeno", model.attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.DIRECT, model.attributeMappings().get(0).type()); - Assertions.assertEquals("enhqhskndnelq", model.attributeMappings().get(0).functionName()); - Assertions.assertEquals("adlknwfoanni", model.attributeMappings().get(0).expression()); - Assertions.assertEquals("etxivcnrlyxnuc", model.attributeMappings().get(0).attributeReference().name()); - Assertions.assertEquals("p", model.attributeMappings().get(0).attributeReference().entity()); - 
Assertions.assertEquals("kwqpat", - model.attributeMappings().get(0).attributeReference().entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappings().get(0).attributeReference().entityConnectionReference().type()); - Assertions.assertEquals("cjbctviv", model.attributeMappings().get(0).attributeReferences().get(0).name()); - Assertions.assertEquals("qymtuowogtgits", - model.attributeMappings().get(0).attributeReferences().get(0).entity()); - Assertions.assertEquals("vbrzcdb", - model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.attributeMappings().get(0).attributeReferences().get(0).entityConnectionReference().type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java deleted file mode 100644 index eec5d02e0d0c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperAttributeReferenceTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.MapperAttributeReference; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import org.junit.jupiter.api.Assertions; - -public final class MapperAttributeReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperAttributeReference model = BinaryData.fromString( - "{\"name\":\"jj\",\"entity\":\"wbeqrkuor\",\"entityConnectionReference\":{\"connectionName\":\"sruqnmdvha\",\"type\":\"linkedservicetype\"}}") - .toObject(MapperAttributeReference.class); - Assertions.assertEquals("jj", model.name()); - Assertions.assertEquals("wbeqrkuor", model.entity()); - Assertions.assertEquals("sruqnmdvha", model.entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.entityConnectionReference().type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperAttributeReference model = new MapperAttributeReference().withName("jj") - .withEntity("wbeqrkuor") - .withEntityConnectionReference(new MapperConnectionReference().withConnectionName("sruqnmdvha") - .withType(ConnectionType.LINKEDSERVICETYPE)); - model = BinaryData.fromObject(model).toObject(MapperAttributeReference.class); - Assertions.assertEquals("jj", model.name()); - Assertions.assertEquals("wbeqrkuor", model.entity()); - Assertions.assertEquals("sruqnmdvha", model.entityConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.entityConnectionReference().type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java deleted file mode 100644 index 20d32cdd0b4c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionReferenceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import org.junit.jupiter.api.Assertions; - -public final class MapperConnectionReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperConnectionReference model - = BinaryData.fromString("{\"connectionName\":\"xwetwkdrcyrucpc\",\"type\":\"linkedservicetype\"}") - .toObject(MapperConnectionReference.class); - Assertions.assertEquals("xwetwkdrcyrucpc", model.connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperConnectionReference model = new MapperConnectionReference().withConnectionName("xwetwkdrcyrucpc") - .withType(ConnectionType.LINKEDSERVICETYPE); - model = BinaryData.fromObject(model).toObject(MapperConnectionReference.class); - Assertions.assertEquals("xwetwkdrcyrucpc", model.connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java deleted file mode 100644 index 05225efbb674..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperConnectionTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MapperConnectionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperConnection model = BinaryData.fromString( - "{\"linkedService\":{\"referenceName\":\"az\",\"parameters\":{\"lv\":\"datatggmuwdchozfnkfe\",\"oaiknaqlnuwig\":\"datanoakiz\"}},\"linkedServiceType\":\"xly\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"cwxhmpej\",\"value\":\"datake\"},{\"name\":\"onwivkcqhrxhxkn\",\"value\":\"datacrmmkyupijuby\"},{\"name\":\"fkak\",\"value\":\"datafrkemyildudxja\"}]}") - .toObject(MapperConnection.class); - Assertions.assertEquals("az", model.linkedService().referenceName()); - Assertions.assertEquals("xly", model.linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); - Assertions.assertEquals(true, model.isInlineDataset()); - Assertions.assertEquals("cwxhmpej", 
model.commonDslConnectorProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperConnection model = new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("az") - .withParameters(mapOf("lv", "datatggmuwdchozfnkfe", "oaiknaqlnuwig", "datanoakiz"))) - .withLinkedServiceType("xly") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("cwxhmpej").withValue("datake"), - new MapperDslConnectorProperties().withName("onwivkcqhrxhxkn").withValue("datacrmmkyupijuby"), - new MapperDslConnectorProperties().withName("fkak").withValue("datafrkemyildudxja"))); - model = BinaryData.fromObject(model).toObject(MapperConnection.class); - Assertions.assertEquals("az", model.linkedService().referenceName()); - Assertions.assertEquals("xly", model.linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.type()); - Assertions.assertEquals(true, model.isInlineDataset()); - Assertions.assertEquals("cwxhmpej", model.commonDslConnectorProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java deleted file mode 100644 index 1d12a5d35d5d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperDslConnectorPropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import org.junit.jupiter.api.Assertions; - -public final class MapperDslConnectorPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperDslConnectorProperties model = BinaryData.fromString("{\"name\":\"rb\",\"value\":\"dataxsjybvitvqk\"}") - .toObject(MapperDslConnectorProperties.class); - Assertions.assertEquals("rb", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperDslConnectorProperties model - = new MapperDslConnectorProperties().withName("rb").withValue("dataxsjybvitvqk"); - model = BinaryData.fromObject(model).toObject(MapperDslConnectorProperties.class); - Assertions.assertEquals("rb", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java deleted file mode 100644 index 8b7dd35f0972..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyRecurrenceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import org.junit.jupiter.api.Assertions; - -public final class MapperPolicyRecurrenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperPolicyRecurrence model = BinaryData.fromString("{\"frequency\":\"Minute\",\"interval\":1079278403}") - .toObject(MapperPolicyRecurrence.class); - Assertions.assertEquals(FrequencyType.MINUTE, model.frequency()); - Assertions.assertEquals(1079278403, model.interval()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperPolicyRecurrence model - = new MapperPolicyRecurrence().withFrequency(FrequencyType.MINUTE).withInterval(1079278403); - model = BinaryData.fromObject(model).toObject(MapperPolicyRecurrence.class); - Assertions.assertEquals(FrequencyType.MINUTE, model.frequency()); - Assertions.assertEquals(1079278403, model.interval()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java deleted file mode 100644 index 
63964d7a4bfb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperPolicyTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.FrequencyType; -import com.azure.resourcemanager.datafactory.models.MapperPolicy; -import com.azure.resourcemanager.datafactory.models.MapperPolicyRecurrence; -import org.junit.jupiter.api.Assertions; - -public final class MapperPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperPolicy model = BinaryData - .fromString("{\"mode\":\"ytiq\",\"recurrence\":{\"frequency\":\"Hour\",\"interval\":602247421}}") - .toObject(MapperPolicy.class); - Assertions.assertEquals("ytiq", model.mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.recurrence().frequency()); - Assertions.assertEquals(602247421, model.recurrence().interval()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperPolicy model = new MapperPolicy().withMode("ytiq") - .withRecurrence(new MapperPolicyRecurrence().withFrequency(FrequencyType.HOUR).withInterval(602247421)); - model = BinaryData.fromObject(model).toObject(MapperPolicy.class); - Assertions.assertEquals("ytiq", model.mode()); - Assertions.assertEquals(FrequencyType.HOUR, model.recurrence().frequency()); - Assertions.assertEquals(602247421, model.recurrence().interval()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java deleted file mode 100644 index 4c3a7dee645f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperSourceConnectionsInfoTests.java +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MapperSourceConnectionsInfoTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperSourceConnectionsInfo model = BinaryData.fromString( - 
"{\"sourceEntities\":[{\"name\":\"tjqvqyvweht\",\"properties\":{\"schema\":[{\"name\":\"zzy\",\"dataType\":\"vusxiv\"},{\"name\":\"rryveimipskdy\",\"dataType\":\"tv\"}],\"dslConnectorProperties\":[{\"name\":\"aftjvvruxwigsy\",\"value\":\"datapq\"},{\"name\":\"mjtgrqg\",\"value\":\"datakkileplkc\"},{\"name\":\"knh\",\"value\":\"databbaedorvvm\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"oygbdgwumgxd\",\"parameters\":{\"tm\":\"datapabgdexjddvjsaqw\",\"xaptefhexcgjok\":\"datawllcolsr\",\"ek\":\"dataljnhvlqj\",\"xeslkhhustcpoqm\":\"dataeeksnbksdqhjvyk\"}},\"linkedServiceType\":\"nwqjwgokn\",\"type\":\"linkedservicetype\",\"isInlineDataset\":false,\"commonDslConnectorProperties\":[{\"name\":\"ybwfdbkjb\",\"value\":\"dataensvkzykjtj\"},{\"name\":\"sxfwushcdp\",\"value\":\"datapn\"},{\"name\":\"mgjfbpkuwxeoio\",\"value\":\"dataizfavkjzwf\"}]}}") - .toObject(MapperSourceConnectionsInfo.class); - Assertions.assertEquals("tjqvqyvweht", model.sourceEntities().get(0).name()); - Assertions.assertEquals("zzy", model.sourceEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("vusxiv", model.sourceEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("aftjvvruxwigsy", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("oygbdgwumgxd", model.connection().linkedService().referenceName()); - Assertions.assertEquals("nwqjwgokn", model.connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(false, model.connection().isInlineDataset()); - Assertions.assertEquals("ybwfdbkjb", model.connection().commonDslConnectorProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperSourceConnectionsInfo model = new MapperSourceConnectionsInfo() - .withSourceEntities(Arrays.asList(new MapperTable().withName("tjqvqyvweht") - 
.withSchema(Arrays.asList(new MapperTableSchema().withName("zzy").withDataType("vusxiv"), - new MapperTableSchema().withName("rryveimipskdy").withDataType("tv"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("aftjvvruxwigsy").withValue("datapq"), - new MapperDslConnectorProperties().withName("mjtgrqg").withValue("datakkileplkc"), - new MapperDslConnectorProperties().withName("knh").withValue("databbaedorvvm"))))) - .withConnection( - new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("oygbdgwumgxd") - .withParameters(mapOf("tm", "datapabgdexjddvjsaqw", "xaptefhexcgjok", "datawllcolsr", "ek", - "dataljnhvlqj", "xeslkhhustcpoqm", "dataeeksnbksdqhjvyk"))) - .withLinkedServiceType("nwqjwgokn") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(false) - .withCommonDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("ybwfdbkjb").withValue("dataensvkzykjtj"), - new MapperDslConnectorProperties().withName("sxfwushcdp").withValue("datapn"), - new MapperDslConnectorProperties().withName("mgjfbpkuwxeoio").withValue("dataizfavkjzwf")))); - model = BinaryData.fromObject(model).toObject(MapperSourceConnectionsInfo.class); - Assertions.assertEquals("tjqvqyvweht", model.sourceEntities().get(0).name()); - Assertions.assertEquals("zzy", model.sourceEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("vusxiv", model.sourceEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("aftjvvruxwigsy", model.sourceEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("oygbdgwumgxd", model.connection().linkedService().referenceName()); - Assertions.assertEquals("nwqjwgokn", model.connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(false, model.connection().isInlineDataset()); - 
Assertions.assertEquals("ybwfdbkjb", model.connection().commonDslConnectorProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java deleted file mode 100644 index 9187c0c1a293..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTablePropertiesTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MapperTableProperties; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class MapperTablePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperTableProperties model = BinaryData.fromString( - "{\"schema\":[{\"name\":\"wetp\",\"dataType\":\"ycyqiqyhgfsetzl\"},{\"name\":\"bsfledynojpziu\",\"dataType\":\"bzkkd\"}],\"dslConnectorProperties\":[{\"name\":\"sycljsel\",\"value\":\"datapbafvafhlbylc\"},{\"name\":\"evxrhyz\",\"value\":\"datawrso\"},{\"name\":\"ltdb\",\"value\":\"datairrhv\"}]}") - .toObject(MapperTableProperties.class); - Assertions.assertEquals("wetp", model.schema().get(0).name()); - Assertions.assertEquals("ycyqiqyhgfsetzl", model.schema().get(0).dataType()); - Assertions.assertEquals("sycljsel", model.dslConnectorProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperTableProperties model = new MapperTableProperties() - .withSchema(Arrays.asList(new MapperTableSchema().withName("wetp").withDataType("ycyqiqyhgfsetzl"), - new MapperTableSchema().withName("bsfledynojpziu").withDataType("bzkkd"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("sycljsel").withValue("datapbafvafhlbylc"), - new MapperDslConnectorProperties().withName("evxrhyz").withValue("datawrso"), - new MapperDslConnectorProperties().withName("ltdb").withValue("datairrhv"))); - model = BinaryData.fromObject(model).toObject(MapperTableProperties.class); - Assertions.assertEquals("wetp", model.schema().get(0).name()); - Assertions.assertEquals("ycyqiqyhgfsetzl", 
model.schema().get(0).dataType()); - Assertions.assertEquals("sycljsel", model.dslConnectorProperties().get(0).name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java deleted file mode 100644 index 41705875a63f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableSchemaTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import org.junit.jupiter.api.Assertions; - -public final class MapperTableSchemaTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperTableSchema model = BinaryData.fromString("{\"name\":\"nracw\",\"dataType\":\"qigtuujwouhdaws\"}") - .toObject(MapperTableSchema.class); - Assertions.assertEquals("nracw", model.name()); - Assertions.assertEquals("qigtuujwouhdaws", model.dataType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperTableSchema model = new MapperTableSchema().withName("nracw").withDataType("qigtuujwouhdaws"); - model = BinaryData.fromObject(model).toObject(MapperTableSchema.class); - Assertions.assertEquals("nracw", model.name()); - Assertions.assertEquals("qigtuujwouhdaws", model.dataType()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java deleted file mode 100644 index 38348662c805..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTableTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class MapperTableTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperTable model = BinaryData.fromString( - "{\"name\":\"yay\",\"properties\":{\"schema\":[{\"name\":\"sbfwxr\",\"dataType\":\"mdew\"}],\"dslConnectorProperties\":[{\"name\":\"krplbjazejww\",\"value\":\"datayoyp\"},{\"name\":\"hbrnnhjx\",\"value\":\"datawjh\"}]}}") - .toObject(MapperTable.class); - Assertions.assertEquals("yay", model.name()); - Assertions.assertEquals("sbfwxr", model.schema().get(0).name()); - Assertions.assertEquals("mdew", model.schema().get(0).dataType()); - Assertions.assertEquals("krplbjazejww", model.dslConnectorProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperTable model = new MapperTable().withName("yay") - .withSchema(Arrays.asList(new MapperTableSchema().withName("sbfwxr").withDataType("mdew"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("krplbjazejww").withValue("datayoyp"), - new 
MapperDslConnectorProperties().withName("hbrnnhjx").withValue("datawjh"))); - model = BinaryData.fromObject(model).toObject(MapperTable.class); - Assertions.assertEquals("yay", model.name()); - Assertions.assertEquals("sbfwxr", model.schema().get(0).name()); - Assertions.assertEquals("mdew", model.schema().get(0).dataType()); - Assertions.assertEquals("krplbjazejww", model.dslConnectorProperties().get(0).name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java deleted file mode 100644 index bc4da0c30b23..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MapperTargetConnectionsInfoTests.java +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ConnectionType; -import com.azure.resourcemanager.datafactory.models.DataMapperMapping; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMapping; -import com.azure.resourcemanager.datafactory.models.MapperAttributeMappings; -import com.azure.resourcemanager.datafactory.models.MapperAttributeReference; -import com.azure.resourcemanager.datafactory.models.MapperConnection; -import com.azure.resourcemanager.datafactory.models.MapperConnectionReference; -import com.azure.resourcemanager.datafactory.models.MapperDslConnectorProperties; -import com.azure.resourcemanager.datafactory.models.MapperTable; -import com.azure.resourcemanager.datafactory.models.MapperTableSchema; -import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo; -import com.azure.resourcemanager.datafactory.models.MappingType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MapperTargetConnectionsInfoTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MapperTargetConnectionsInfo model = BinaryData.fromString( - 
"{\"targetEntities\":[{\"name\":\"vfdjkpdxp\",\"properties\":{\"schema\":[{\"name\":\"nmgzvyfijdkzuqnw\",\"dataType\":\"thuqolyahluq\"},{\"name\":\"ulsutrjbhxy\",\"dataType\":\"hyqezvqq\"}],\"dslConnectorProperties\":[{\"name\":\"ftbcvexreuquow\",\"value\":\"datajv\"},{\"name\":\"hreagk\",\"value\":\"dataxv\"},{\"name\":\"tvbczsulm\",\"value\":\"dataglmep\"},{\"name\":\"fs\",\"value\":\"datakgsangpszng\"}]}},{\"name\":\"p\",\"properties\":{\"schema\":[{\"name\":\"ecjujcng\",\"dataType\":\"dyedmzrgjfo\"}],\"dslConnectorProperties\":[{\"name\":\"noitpkpztr\",\"value\":\"datagxvcoqrasw\"}]}},{\"name\":\"yxp\",\"properties\":{\"schema\":[{\"name\":\"ialwv\",\"dataType\":\"buhzacaq\"}],\"dslConnectorProperties\":[{\"name\":\"co\",\"value\":\"dataujpdsxzak\"},{\"name\":\"jkmvbi\",\"value\":\"datajofqcvovjufycs\"}]}}],\"connection\":{\"linkedService\":{\"referenceName\":\"bemyeji\",\"parameters\":{\"hortu\":\"dataxeg\",\"cbgqnzmnhiil\":\"dataawlpjfelqerpp\",\"ccgzpraoxnyu\":\"dataalwcjgckbb\",\"sgftipwc\":\"datafa\"}},\"linkedServiceType\":\"yubhiqdx\",\"type\":\"linkedservicetype\",\"isInlineDataset\":true,\"commonDslConnectorProperties\":[{\"name\":\"hza\",\"value\":\"datacnuhiigbylbuigv\"},{\"name\":\"atvcrkdlbnbq\",\"value\":\"datah\"},{\"name\":\"yhzlwxaeaovurexd\",\"value\":\"datasbdweaderzmwnt\"}]},\"dataMapperMappings\":[{\"targetEntityName\":\"ttmvmmagoaqylkjz\",\"sourceEntityName\":\"iua\",\"sourceConnectionReference\":{\"connectionName\":\"gmxitpfi\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"krlgjm\",\"type\":\"Aggregate\",\"functionName\":\"vcqguefzh\",\"expression\":\"p\",\"attributeReference\":{},\"attributeReferences\":[{},{},{}]},{\"name\":\"relyujlfyoum\",\"type\":\"Derived\",\"functionName\":\"eclcdigptajbrzm\",\"expression\":\"ucycijo\",\"attributeReference\":{},\"attributeReferences\":[{},{}]}]},\"sourceDenormalizeInfo\":\"datatgjcy\"},{\"targetEntityName\":\"zjd\",\"sourceEntityName\":\"qjbtxjeaoqaqbzg
y\",\"sourceConnectionReference\":{\"connectionName\":\"wvua\",\"type\":\"linkedservicetype\"},\"attributeMappingInfo\":{\"attributeMappings\":[{\"name\":\"teuli\",\"type\":\"Derived\",\"functionName\":\"kcvmwfauxxepmy\",\"expression\":\"ormcqmic\",\"attributeReference\":{},\"attributeReferences\":[{},{},{},{}]},{\"name\":\"kzfbojxjmcsmyq\",\"type\":\"Aggregate\",\"functionName\":\"cp\",\"expression\":\"kwywzwofalic\",\"attributeReference\":{},\"attributeReferences\":[{},{},{}]},{\"name\":\"qtamtyvsknxrw\",\"type\":\"Aggregate\",\"functionName\":\"vsbcfhzagxnvhy\",\"expression\":\"dimwrzregzgyu\",\"attributeReference\":{},\"attributeReferences\":[{}]},{\"name\":\"pweryekzk\",\"type\":\"Direct\",\"functionName\":\"ottaw\",\"expression\":\"osxw\",\"attributeReference\":{},\"attributeReferences\":[{}]}]},\"sourceDenormalizeInfo\":\"datatfvpndpmiljpn\"}],\"relationships\":[\"dataudqll\",\"datasauzpjlx\"]}") - .toObject(MapperTargetConnectionsInfo.class); - Assertions.assertEquals("vfdjkpdxp", model.targetEntities().get(0).name()); - Assertions.assertEquals("nmgzvyfijdkzuqnw", model.targetEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("thuqolyahluq", model.targetEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("ftbcvexreuquow", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("bemyeji", model.connection().linkedService().referenceName()); - Assertions.assertEquals("yubhiqdx", model.connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(true, model.connection().isInlineDataset()); - Assertions.assertEquals("hza", model.connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("ttmvmmagoaqylkjz", model.dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("iua", model.dataMapperMappings().get(0).sourceEntityName()); - 
Assertions.assertEquals("gmxitpfi", - model.dataMapperMappings().get(0).sourceConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("krlgjm", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("vcqguefzh", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("p", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).expression()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MapperTargetConnectionsInfo model - = new MapperTargetConnectionsInfo() - .withTargetEntities(Arrays.asList( - new MapperTable().withName("vfdjkpdxp") - .withSchema(Arrays.asList( - new MapperTableSchema().withName("nmgzvyfijdkzuqnw").withDataType("thuqolyahluq"), - new MapperTableSchema().withName("ulsutrjbhxy").withDataType("hyqezvqq"))) - .withDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("ftbcvexreuquow").withValue("datajv"), - new MapperDslConnectorProperties().withName("hreagk").withValue("dataxv"), - new MapperDslConnectorProperties().withName("tvbczsulm").withValue("dataglmep"), - new MapperDslConnectorProperties().withName("fs").withValue("datakgsangpszng"))), - new MapperTable().withName("p") - .withSchema( - Arrays.asList(new MapperTableSchema().withName("ecjujcng").withDataType("dyedmzrgjfo"))) - .withDslConnectorProperties(Arrays.asList( - new MapperDslConnectorProperties().withName("noitpkpztr").withValue("datagxvcoqrasw"))), - new MapperTable().withName("yxp") - .withSchema(Arrays.asList(new 
MapperTableSchema().withName("ialwv").withDataType("buhzacaq"))) - .withDslConnectorProperties( - Arrays.asList(new MapperDslConnectorProperties().withName("co").withValue("dataujpdsxzak"), - new MapperDslConnectorProperties().withName("jkmvbi") - .withValue("datajofqcvovjufycs"))))) - .withConnection(new MapperConnection() - .withLinkedService(new LinkedServiceReference().withReferenceName("bemyeji") - .withParameters(mapOf("hortu", "dataxeg", "cbgqnzmnhiil", "dataawlpjfelqerpp", "ccgzpraoxnyu", - "dataalwcjgckbb", "sgftipwc", "datafa"))) - .withLinkedServiceType("yubhiqdx") - .withType(ConnectionType.LINKEDSERVICETYPE) - .withIsInlineDataset(true) - .withCommonDslConnectorProperties(Arrays - .asList(new MapperDslConnectorProperties().withName("hza").withValue("datacnuhiigbylbuigv"), - new MapperDslConnectorProperties().withName("atvcrkdlbnbq").withValue("datah"), - new MapperDslConnectorProperties().withName("yhzlwxaeaovurexd") - .withValue("datasbdweaderzmwnt")))) - .withDataMapperMappings(Arrays.asList( - new DataMapperMapping().withTargetEntityName("ttmvmmagoaqylkjz") - .withSourceEntityName("iua") - .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("gmxitpfi") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings().withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("krlgjm") - .withType(MappingType.AGGREGATE) - .withFunctionName("vcqguefzh") - .withExpression("p") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), - new MapperAttributeReference(), new MapperAttributeReference())), - new MapperAttributeMapping().withName("relyujlfyoum") - .withType(MappingType.DERIVED) - .withFunctionName("eclcdigptajbrzm") - .withExpression("ucycijo") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), - new 
MapperAttributeReference()))))) - .withSourceDenormalizeInfo("datatgjcy"), - new DataMapperMapping().withTargetEntityName("zjd") - .withSourceEntityName("qjbtxjeaoqaqbzgy") - .withSourceConnectionReference(new MapperConnectionReference().withConnectionName("wvua") - .withType(ConnectionType.LINKEDSERVICETYPE)) - .withAttributeMappingInfo( - new MapperAttributeMappings() - .withAttributeMappings(Arrays.asList( - new MapperAttributeMapping().withName("teuli") - .withType(MappingType.DERIVED) - .withFunctionName("kcvmwfauxxepmy") - .withExpression("ormcqmic") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), - new MapperAttributeReference(), new MapperAttributeReference(), - new MapperAttributeReference())), - new MapperAttributeMapping().withName("kzfbojxjmcsmyq") - .withType(MappingType.AGGREGATE) - .withFunctionName("cp") - .withExpression("kwywzwofalic") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference(), - new MapperAttributeReference(), new MapperAttributeReference())), - new MapperAttributeMapping().withName("qtamtyvsknxrw") - .withType(MappingType.AGGREGATE) - .withFunctionName("vsbcfhzagxnvhy") - .withExpression("dimwrzregzgyu") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference())), - new MapperAttributeMapping().withName("pweryekzk") - .withType(MappingType.DIRECT) - .withFunctionName("ottaw") - .withExpression("osxw") - .withAttributeReference(new MapperAttributeReference()) - .withAttributeReferences(Arrays.asList(new MapperAttributeReference()))))) - .withSourceDenormalizeInfo("datatfvpndpmiljpn"))) - .withRelationships(Arrays.asList("dataudqll", "datasauzpjlx")); - model = BinaryData.fromObject(model).toObject(MapperTargetConnectionsInfo.class); - Assertions.assertEquals("vfdjkpdxp", 
model.targetEntities().get(0).name()); - Assertions.assertEquals("nmgzvyfijdkzuqnw", model.targetEntities().get(0).schema().get(0).name()); - Assertions.assertEquals("thuqolyahluq", model.targetEntities().get(0).schema().get(0).dataType()); - Assertions.assertEquals("ftbcvexreuquow", model.targetEntities().get(0).dslConnectorProperties().get(0).name()); - Assertions.assertEquals("bemyeji", model.connection().linkedService().referenceName()); - Assertions.assertEquals("yubhiqdx", model.connection().linkedServiceType()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, model.connection().type()); - Assertions.assertEquals(true, model.connection().isInlineDataset()); - Assertions.assertEquals("hza", model.connection().commonDslConnectorProperties().get(0).name()); - Assertions.assertEquals("ttmvmmagoaqylkjz", model.dataMapperMappings().get(0).targetEntityName()); - Assertions.assertEquals("iua", model.dataMapperMappings().get(0).sourceEntityName()); - Assertions.assertEquals("gmxitpfi", - model.dataMapperMappings().get(0).sourceConnectionReference().connectionName()); - Assertions.assertEquals(ConnectionType.LINKEDSERVICETYPE, - model.dataMapperMappings().get(0).sourceConnectionReference().type()); - Assertions.assertEquals("krlgjm", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).name()); - Assertions.assertEquals(MappingType.AGGREGATE, - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).type()); - Assertions.assertEquals("vcqguefzh", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).functionName()); - Assertions.assertEquals("p", - model.dataMapperMappings().get(0).attributeMappingInfo().attributeMappings().get(0).expression()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java deleted file mode 100644 index 9aa552932736..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTests.java +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSink; -import com.azure.resourcemanager.datafactory.models.DataFlowSource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MappingDataFlow; -import com.azure.resourcemanager.datafactory.models.Transformation; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MappingDataFlowTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MappingDataFlow model = BinaryData.fromString( - 
"{\"type\":\"MappingDataFlow\",\"typeProperties\":{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"trpb\",\"parameters\":{\"lbnwtpcpahp\":\"datayuuatvlmbjwcolbm\"}},\"name\":\"z\",\"description\":\"xhmtfhocnxzc\",\"dataset\":{\"referenceName\":\"hngxnoqrxtd\",\"parameters\":{\"epfwwt\":\"datajevhdlmydidw\",\"o\":\"dataf\",\"wcdbckyoik\":\"datasxxh\",\"rbhtmeplvukaobr\":\"datakxhnegknj\"}},\"linkedService\":{\"referenceName\":\"pgsn\",\"parameters\":{\"hjym\":\"datanchjhgemuowakywa\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qt\",\"datasetParameters\":\"datadrclssoljome\",\"parameters\":{\"gjiiytssiki\":\"dataycnlbvgjcodk\",\"gmqsorhcekxg\":\"databcufqbvntn\"},\"\":{\"cpwzv\":\"datakm\",\"qba\":\"datadoksqdtiwlwxlbon\",\"xbyja\":\"dataqicqchygt\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"bdpkxyqvgx\",\"parameters\":{\"hkxdxuwsaifmc\":\"datadetv\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"osbz\",\"parameters\":{\"bcknjolgj\":\"datagcv\"}},\"name\":\"yxpvelszerqze\",\"description\":\"o\",\"dataset\":{\"referenceName\":\"intxwa\",\"parameters\":{\"ulhmzyq\":\"datalzoblqwaafr\",\"gaxwmzwdfkbnrzo\":\"datahdvafjrqpjiyrqjc\",\"tqjfgxxsaet\":\"datapdltbq\"}},\"linkedService\":{\"referenceName\":\"dgvpyig\",\"parameters\":{\"uwjoedxnguca\":\"dataqilzdc\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"aurwwgilfjq\",\"datasetParameters\":\"datacdmkxwxdcvjw\",\"parameters\":{\"xrtuicds\":\"dataiakeciqc\"},\"\":{\"hzzwvywrgyngy\":\"datafmmp\",\"clamgglvlmfejdoq\":\"datagrpxncakiqaondjr\",\"gxhqfgqkayejs\":\"dataykglt\"}}},{\"schemaLinkedService\":{\"referenceName\":\"lgflwfg\",\"parameters\":{\"ijjcea\":\"datau\",\"a\":\"datalijjjrtvam\",\"cxetyvkunmignoh\":\"datazknxkv\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"kgqogjw\",\"parameters\":{\"eilbjywfcf\":\"datadedvabbxbhme\"}},\"name\":\"zir\",\"description\":\"ihvwyp\",\"dataset\":{\"referenceName\":\"u\",\"parameters\":{\"djsllfr\":\"d
atalczwci\",\"frgnawbabgfbktyj\":\"datavdmvxadqa\",\"xqmjeajcxn\":\"datafczlfsyqkfrbzgow\",\"hlusrvxisi\":\"dataqg\"}},\"linkedService\":{\"referenceName\":\"ceagbjqvlsumywz\",\"parameters\":{\"oyjfqipu\":\"dataxgo\",\"gv\":\"datayznclkfkee\",\"l\":\"datapemtuoqu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"qavn\",\"datasetParameters\":\"dataflqqbtnyjpylxd\",\"parameters\":{\"kottlwuhvajmailf\":\"dataabmvmsxbaevwjc\"},\"\":{\"xbdmvrscmqernd\":\"datajzakzwjiqull\",\"dj\":\"datarnyeofltfnnxrk\",\"ipfohykfkx\":\"dataynnfmuiii\",\"woiymrvz\":\"databcbrwjiutgnjizbe\"}}}],\"transformations\":[{\"name\":\"yrsrziuctixgb\",\"description\":\"uifr\",\"dataset\":{\"referenceName\":\"kaapezkiswqjmdg\",\"parameters\":{\"jczjnciuiyqv\":\"dataparybjufpt\",\"sqqzlgcndhz\":\"datadaswvpp\",\"cfsrhkhgsn\":\"datarr\",\"pphefsbzx\":\"datauww\"}},\"linkedService\":{\"referenceName\":\"zxomeikjclwz\",\"parameters\":{\"qtaazyqbxy\":\"datamwpfs\",\"iqezxlhdj\":\"dataoyfpuq\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"cadwvpsozjii\",\"datasetParameters\":\"datariybmrz\",\"parameters\":{\"fnmxa\":\"datanxwdvwnjkg\"},\"\":{\"tibtyi\":\"dataq\",\"jpnqnoowsbeden\":\"datauyvpirf\",\"ucnulgmnh\":\"dataexkxbhx\",\"fajs\":\"dataevdyz\"}}},{\"name\":\"kskmqozzkivyhjrl\",\"description\":\"zji\",\"dataset\":{\"referenceName\":\"qfhefkwabsol\",\"parameters\":{\"exhvuqbozoolz\":\"dataqqlmgnlqxsjxte\",\"nx\":\"dataocarkuzlbcnndt\"}},\"linkedService\":{\"referenceName\":\"qytl\",\"parameters\":{\"ckze\":\"datayzm\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"akckywym\",\"datasetParameters\":\"dataaabjkdtfohfao\",\"parameters\":{\"y\":\"datakiwrsiw\",\"rra\":\"dataquryk\",\"s\":\"dataeek\",\"gdda\":\"dataeh\"},\"\":{\"mptyrilkfbnrqqxv\":\"databgydlqidywm\",\"swbnfddepldwqjns\":\"datatpbnfnqtxjtoma\"}}},{\"name\":\"zygleexahvm\",\"description\":\"hsbrcary\",\"dataset\":{\"referenceName\":\"jjzyvoaqajuve\",\"parameters\":{\"be\":\"datatdmkrrbh
mpful\",\"ununm\":\"datagybpmfb\",\"kdschlzvfictnkjj\":\"datazkrvfyi\",\"hbkgfyrt\":\"datagcwn\"}},\"linkedService\":{\"referenceName\":\"mhmjpjs\",\"parameters\":{\"arxifvqnrxt\":\"datapdqwtygevgwmse\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"jptnvwjhrsidqpx\",\"datasetParameters\":\"datatpakf\",\"parameters\":{\"ykyutrymdwmfj\":\"dataatw\",\"efgnaavuagnt\":\"datapycvjqdvdwkqpldr\",\"hspfefyihd\":\"datataoutnpdct\"},\"\":{\"hmtybkcgsuthhll\":\"datauyld\",\"jrr\":\"datamwynefxexlfciatx\",\"ajfoxc\":\"datakmdskjhhxd\"}}}],\"script\":\"cvslxlhuavkrmukm\",\"scriptLines\":[\"kxettcslojfkq\",\"dnqtoqxjhqx\",\"s\"]},\"description\":\"tkbtnqlrngl\",\"annotations\":[\"dataiipsnawwlqkz\",\"dataxhhllxricct\",\"datawmuqqoajxeiygle\",\"datarwvaexhdc\"],\"folder\":{\"name\":\"eqnkbrupobehdml\"}}") - .toObject(MappingDataFlow.class); - Assertions.assertEquals("tkbtnqlrngl", model.description()); - Assertions.assertEquals("eqnkbrupobehdml", model.folder().name()); - Assertions.assertEquals("z", model.sources().get(0).name()); - Assertions.assertEquals("xhmtfhocnxzc", model.sources().get(0).description()); - Assertions.assertEquals("hngxnoqrxtd", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("pgsn", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("qt", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("trpb", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("yxpvelszerqze", model.sinks().get(0).name()); - Assertions.assertEquals("o", model.sinks().get(0).description()); - Assertions.assertEquals("intxwa", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("dgvpyig", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, 
model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("aurwwgilfjq", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("bdpkxyqvgx", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("osbz", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("yrsrziuctixgb", model.transformations().get(0).name()); - Assertions.assertEquals("uifr", model.transformations().get(0).description()); - Assertions.assertEquals("kaapezkiswqjmdg", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("zxomeikjclwz", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("cadwvpsozjii", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("cvslxlhuavkrmukm", model.script()); - Assertions.assertEquals("kxettcslojfkq", model.scriptLines().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MappingDataFlow model = new MappingDataFlow().withDescription("tkbtnqlrngl") - .withAnnotations( - Arrays.asList("dataiipsnawwlqkz", "dataxhhllxricct", "datawmuqqoajxeiygle", "datarwvaexhdc")) - .withFolder(new DataFlowFolder().withName("eqnkbrupobehdml")) - .withSources(Arrays.asList(new DataFlowSource().withName("z") - .withDescription("xhmtfhocnxzc") - .withDataset(new DatasetReference().withReferenceName("hngxnoqrxtd") - .withParameters(mapOf("epfwwt", "datajevhdlmydidw", "o", "dataf", "wcdbckyoik", "datasxxh", - "rbhtmeplvukaobr", "datakxhnegknj"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("pgsn") - .withParameters(mapOf("hjym", "datanchjhgemuowakywa"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("qt") - 
.withDatasetParameters("datadrclssoljome") - .withParameters(mapOf("gjiiytssiki", "dataycnlbvgjcodk", "gmqsorhcekxg", "databcufqbvntn")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("trpb") - .withParameters(mapOf("lbnwtpcpahp", "datayuuatvlmbjwcolbm"))))) - .withSinks(Arrays.asList( - new DataFlowSink().withName("yxpvelszerqze") - .withDescription("o") - .withDataset(new DatasetReference().withReferenceName("intxwa") - .withParameters(mapOf("ulhmzyq", "datalzoblqwaafr", "gaxwmzwdfkbnrzo", "datahdvafjrqpjiyrqjc", - "tqjfgxxsaet", "datapdltbq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("dgvpyig") - .withParameters(mapOf("uwjoedxnguca", "dataqilzdc"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("aurwwgilfjq") - .withDatasetParameters("datacdmkxwxdcvjw") - .withParameters(mapOf("xrtuicds", "dataiakeciqc")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("bdpkxyqvgx") - .withParameters(mapOf("hkxdxuwsaifmc", "datadetv"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("osbz") - .withParameters(mapOf("bcknjolgj", "datagcv"))), - new DataFlowSink().withName("zir") - .withDescription("ihvwyp") - .withDataset(new DatasetReference().withReferenceName("u") - .withParameters(mapOf("djsllfr", "datalczwci", "frgnawbabgfbktyj", "datavdmvxadqa", - "xqmjeajcxn", "datafczlfsyqkfrbzgow", "hlusrvxisi", "dataqg"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("ceagbjqvlsumywz") - .withParameters(mapOf("oyjfqipu", "dataxgo", "gv", "datayznclkfkee", "l", "datapemtuoqu"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("qavn") - .withDatasetParameters("dataflqqbtnyjpylxd") - .withParameters(mapOf("kottlwuhvajmailf", "dataabmvmsxbaevwjc")) - 
.withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("lgflwfg") - .withParameters( - mapOf("ijjcea", "datau", "a", "datalijjjrtvam", "cxetyvkunmignoh", "datazknxkv"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("kgqogjw") - .withParameters(mapOf("eilbjywfcf", "datadedvabbxbhme"))))) - .withTransformations(Arrays.asList( - new Transformation().withName("yrsrziuctixgb") - .withDescription("uifr") - .withDataset(new DatasetReference().withReferenceName("kaapezkiswqjmdg") - .withParameters(mapOf("jczjnciuiyqv", "dataparybjufpt", "sqqzlgcndhz", "datadaswvpp", - "cfsrhkhgsn", "datarr", "pphefsbzx", "datauww"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("zxomeikjclwz") - .withParameters(mapOf("qtaazyqbxy", "datamwpfs", "iqezxlhdj", "dataoyfpuq"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("cadwvpsozjii") - .withDatasetParameters("datariybmrz") - .withParameters(mapOf("fnmxa", "datanxwdvwnjkg")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("kskmqozzkivyhjrl") - .withDescription("zji") - .withDataset(new DatasetReference().withReferenceName("qfhefkwabsol") - .withParameters(mapOf("exhvuqbozoolz", "dataqqlmgnlqxsjxte", "nx", "dataocarkuzlbcnndt"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("qytl").withParameters(mapOf("ckze", "datayzm"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("akckywym") - .withDatasetParameters("dataaabjkdtfohfao") - .withParameters(mapOf("y", "datakiwrsiw", "rra", "dataquryk", "s", "dataeek", "gdda", "dataeh")) - .withAdditionalProperties(mapOf())), - new Transformation().withName("zygleexahvm") - .withDescription("hsbrcary") - .withDataset(new DatasetReference().withReferenceName("jjzyvoaqajuve") - .withParameters(mapOf("be", 
"datatdmkrrbhmpful", "ununm", "datagybpmfb", "kdschlzvfictnkjj", - "datazkrvfyi", "hbkgfyrt", "datagcwn"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("mhmjpjs") - .withParameters(mapOf("arxifvqnrxt", "datapdqwtygevgwmse"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("jptnvwjhrsidqpx") - .withDatasetParameters("datatpakf") - .withParameters(mapOf("ykyutrymdwmfj", "dataatw", "efgnaavuagnt", "datapycvjqdvdwkqpldr", - "hspfefyihd", "datataoutnpdct")) - .withAdditionalProperties(mapOf())))) - .withScript("cvslxlhuavkrmukm") - .withScriptLines(Arrays.asList("kxettcslojfkq", "dnqtoqxjhqx", "s")); - model = BinaryData.fromObject(model).toObject(MappingDataFlow.class); - Assertions.assertEquals("tkbtnqlrngl", model.description()); - Assertions.assertEquals("eqnkbrupobehdml", model.folder().name()); - Assertions.assertEquals("z", model.sources().get(0).name()); - Assertions.assertEquals("xhmtfhocnxzc", model.sources().get(0).description()); - Assertions.assertEquals("hngxnoqrxtd", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("pgsn", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("qt", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("trpb", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("yxpvelszerqze", model.sinks().get(0).name()); - Assertions.assertEquals("o", model.sinks().get(0).description()); - Assertions.assertEquals("intxwa", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("dgvpyig", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("aurwwgilfjq", 
model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("bdpkxyqvgx", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("osbz", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("yrsrziuctixgb", model.transformations().get(0).name()); - Assertions.assertEquals("uifr", model.transformations().get(0).description()); - Assertions.assertEquals("kaapezkiswqjmdg", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("zxomeikjclwz", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("cadwvpsozjii", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("cvslxlhuavkrmukm", model.script()); - Assertions.assertEquals("kxettcslojfkq", model.scriptLines().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java deleted file mode 100644 index 666b3b10b8cb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MappingDataFlowTypePropertiesTests.java +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MappingDataFlowTypeProperties; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DataFlowSink; -import com.azure.resourcemanager.datafactory.models.DataFlowSource; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.Transformation; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MappingDataFlowTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MappingDataFlowTypeProperties model = BinaryData.fromString( - 
"{\"sources\":[{\"schemaLinkedService\":{\"referenceName\":\"vume\",\"parameters\":{\"eqir\":\"databibnzpphepifex\",\"pjlvczuoda\":\"datacjclykcgxv\"}},\"name\":\"punettepdjxq\",\"description\":\"koynuiylpckae\",\"dataset\":{\"referenceName\":\"edveskwxegqp\",\"parameters\":{\"hctmjtsgh\":\"datafn\",\"rpzeqac\":\"databcbcpz\"}},\"linkedService\":{\"referenceName\":\"dtzmpype\",\"parameters\":{\"fkhuytu\":\"dataczshnuqndaizu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"hmt\",\"datasetParameters\":\"datavegwqiukvz\",\"parameters\":{\"i\":\"datawtthaokgksk\",\"hajqfuk\":\"databs\"},\"\":{\"exyionofninbd\":\"dataxpgeumilhwuitr\",\"rsmpcbbprtuga\":\"datazsxcwq\",\"fogdrtbfcm\":\"databzbcyksiv\"}}},{\"schemaLinkedService\":{\"referenceName\":\"ftsjcwj\",\"parameters\":{\"cifhocjxwkl\":\"datagmbawvifdxk\",\"cmufunlcpxxv\":\"dataozrvtx\",\"quv\":\"dataryeyngjgv\"}},\"name\":\"ygg\",\"description\":\"mcrdcue\",\"dataset\":{\"referenceName\":\"tiahxmfq\",\"parameters\":{\"zqbglcjkays\":\"datarvs\",\"oskkfmk\":\"datathzodubtlmjtgbl\"}},\"linkedService\":{\"referenceName\":\"djxyxgbkkqvjcteo\",\"parameters\":{\"vjnzdpvocojhpcna\":\"datarslskkzp\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"fsnggytexvzilm\",\"datasetParameters\":\"datavzkwwwncknr\",\"parameters\":{\"yrth\":\"datajlskzptjxulweu\",\"hokamvfej\":\"dataqlehmcgcjeinu\",\"abzfivf\":\"dataqnttmbq\"},\"\":{\"jfwyyrlhgenuze\":\"dataysthhza\",\"zlsmmdqgmihzpim\":\"datagvkvebaqszll\"}}}],\"sinks\":[{\"schemaLinkedService\":{\"referenceName\":\"nxtminklog\",\"parameters\":{\"bjwzzos\":\"datatzarhzvqnsqktc\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"kybtglwkzpgajsqj\",\"parameters\":{\"uqrebluimmbwx\":\"dataqbmfuvqarwz\",\"kraokq\":\"datafgtdmbvx\",\"aokbavlyttaaknwf\":\"databudbt\"}},\"name\":\"ke\",\"description\":\"mhpdu\",\"dataset\":{\"referenceName\":\"igatolekscbctna\",\"parameters\":{\"dpkawnsnl\":\"datamwbzxpdc\",\"bicziuswswj\":\"dataimouxwksqmudmfco\",\"fwbiv
qvo\":\"datakbqsjhbtqqvyfscy\",\"wvbhlimbyq\":\"datafuy\"}},\"linkedService\":{\"referenceName\":\"r\",\"parameters\":{\"asaxxo\":\"datalikcdrd\",\"kwiy\":\"datasm\",\"ukosrn\":\"datav\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vzmlnkoywsxv\",\"datasetParameters\":\"databjqqaxuyvymcn\",\"parameters\":{\"wxqweuipmpvksmi\":\"datadoabhj\",\"krdpqgfhyrfr\":\"datansqxtltc\",\"rcwfcmfcnrjajq\":\"datakkld\",\"zqgxx\":\"dataatxjtiel\"},\"\":{\"prnzc\":\"databmtlpqagyno\",\"ryqxzxa\":\"datalin\",\"mqimiymqru\":\"datazi\",\"asvvoqsbpkfl\":\"dataguhfupe\"}}}],\"transformations\":[{\"name\":\"k\",\"description\":\"syaowuzowp\",\"dataset\":{\"referenceName\":\"hdkcprgu\",\"parameters\":{\"mqrud\":\"dataztiochluti\",\"mfbcpaqktkrum\":\"dataizcbfzmcrunfhiuc\",\"dkyzbfvxov\":\"datau\",\"hyhlwcjsqg\":\"datakxiuxqggvqr\"}},\"linkedService\":{\"referenceName\":\"hffbxrq\",\"parameters\":{\"wwmjs\":\"datajpeuqlsdxeqztvxw\",\"wwa\":\"dataen\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"leqioulndh\",\"datasetParameters\":\"dataoeojhto\",\"parameters\":{\"novyoanfbcs\":\"datasvidmytzlnglx\"},\"\":{\"jrktpgaeukya\":\"datagywvtxig\"}}}],\"script\":\"hpmwhqnucsk\",\"scriptLines\":[\"i\",\"sjt\"]}") - .toObject(MappingDataFlowTypeProperties.class); - Assertions.assertEquals("punettepdjxq", model.sources().get(0).name()); - Assertions.assertEquals("koynuiylpckae", model.sources().get(0).description()); - Assertions.assertEquals("edveskwxegqp", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("dtzmpype", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("hmt", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("vume", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ke", model.sinks().get(0).name()); - 
Assertions.assertEquals("mhpdu", model.sinks().get(0).description()); - Assertions.assertEquals("igatolekscbctna", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("r", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("vzmlnkoywsxv", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("nxtminklog", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kybtglwkzpgajsqj", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("k", model.transformations().get(0).name()); - Assertions.assertEquals("syaowuzowp", model.transformations().get(0).description()); - Assertions.assertEquals("hdkcprgu", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("hffbxrq", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("leqioulndh", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("hpmwhqnucsk", model.script()); - Assertions.assertEquals("i", model.scriptLines().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MappingDataFlowTypeProperties model = new MappingDataFlowTypeProperties() - .withSources(Arrays.asList( - new DataFlowSource().withName("punettepdjxq") - .withDescription("koynuiylpckae") - .withDataset(new DatasetReference().withReferenceName("edveskwxegqp") - .withParameters(mapOf("hctmjtsgh", "datafn", "rpzeqac", "databcbcpz"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("dtzmpype") - .withParameters(mapOf("fkhuytu", "dataczshnuqndaizu"))) - .withFlowlet(new 
DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("hmt") - .withDatasetParameters("datavegwqiukvz") - .withParameters(mapOf("i", "datawtthaokgksk", "hajqfuk", "databs")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference() - .withReferenceName("vume") - .withParameters(mapOf("eqir", "databibnzpphepifex", "pjlvczuoda", "datacjclykcgxv"))), - new DataFlowSource().withName("ygg") - .withDescription("mcrdcue") - .withDataset(new DatasetReference().withReferenceName("tiahxmfq") - .withParameters(mapOf("zqbglcjkays", "datarvs", "oskkfmk", "datathzodubtlmjtgbl"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("djxyxgbkkqvjcteo") - .withParameters(mapOf("vjnzdpvocojhpcna", "datarslskkzp"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("fsnggytexvzilm") - .withDatasetParameters("datavzkwwwncknr") - .withParameters(mapOf("yrth", "datajlskzptjxulweu", "hokamvfej", "dataqlehmcgcjeinu", "abzfivf", - "dataqnttmbq")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ftsjcwj") - .withParameters(mapOf("cifhocjxwkl", "datagmbawvifdxk", "cmufunlcpxxv", "dataozrvtx", "quv", - "dataryeyngjgv"))))) - .withSinks(Arrays.asList(new DataFlowSink().withName("ke") - .withDescription("mhpdu") - .withDataset(new DatasetReference().withReferenceName("igatolekscbctna") - .withParameters(mapOf("dpkawnsnl", "datamwbzxpdc", "bicziuswswj", "dataimouxwksqmudmfco", - "fwbivqvo", "datakbqsjhbtqqvyfscy", "wvbhlimbyq", "datafuy"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("r") - .withParameters(mapOf("asaxxo", "datalikcdrd", "kwiy", "datasm", "ukosrn", "datav"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vzmlnkoywsxv") - .withDatasetParameters("databjqqaxuyvymcn") - 
.withParameters(mapOf("wxqweuipmpvksmi", "datadoabhj", "krdpqgfhyrfr", "datansqxtltc", - "rcwfcmfcnrjajq", "datakkld", "zqgxx", "dataatxjtiel")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("nxtminklog") - .withParameters(mapOf("bjwzzos", "datatzarhzvqnsqktc"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("kybtglwkzpgajsqj") - .withParameters(mapOf("uqrebluimmbwx", "dataqbmfuvqarwz", "kraokq", "datafgtdmbvx", - "aokbavlyttaaknwf", "databudbt"))))) - .withTransformations(Arrays.asList(new Transformation().withName("k") - .withDescription("syaowuzowp") - .withDataset(new DatasetReference().withReferenceName("hdkcprgu") - .withParameters(mapOf("mqrud", "dataztiochluti", "mfbcpaqktkrum", "dataizcbfzmcrunfhiuc", - "dkyzbfvxov", "datau", "hyhlwcjsqg", "datakxiuxqggvqr"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("hffbxrq") - .withParameters(mapOf("wwmjs", "datajpeuqlsdxeqztvxw", "wwa", "dataen"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("leqioulndh") - .withDatasetParameters("dataoeojhto") - .withParameters(mapOf("novyoanfbcs", "datasvidmytzlnglx")) - .withAdditionalProperties(mapOf())))) - .withScript("hpmwhqnucsk") - .withScriptLines(Arrays.asList("i", "sjt")); - model = BinaryData.fromObject(model).toObject(MappingDataFlowTypeProperties.class); - Assertions.assertEquals("punettepdjxq", model.sources().get(0).name()); - Assertions.assertEquals("koynuiylpckae", model.sources().get(0).description()); - Assertions.assertEquals("edveskwxegqp", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("dtzmpype", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("hmt", 
model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("vume", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ke", model.sinks().get(0).name()); - Assertions.assertEquals("mhpdu", model.sinks().get(0).description()); - Assertions.assertEquals("igatolekscbctna", model.sinks().get(0).dataset().referenceName()); - Assertions.assertEquals("r", model.sinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sinks().get(0).flowlet().type()); - Assertions.assertEquals("vzmlnkoywsxv", model.sinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("nxtminklog", model.sinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("kybtglwkzpgajsqj", model.sinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("k", model.transformations().get(0).name()); - Assertions.assertEquals("syaowuzowp", model.transformations().get(0).description()); - Assertions.assertEquals("hdkcprgu", model.transformations().get(0).dataset().referenceName()); - Assertions.assertEquals("hffbxrq", model.transformations().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.transformations().get(0).flowlet().type()); - Assertions.assertEquals("leqioulndh", model.transformations().get(0).flowlet().referenceName()); - Assertions.assertEquals("hpmwhqnucsk", model.script()); - Assertions.assertEquals("i", model.scriptLines().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java deleted file mode 100644 index ecc09664754c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MariaDBSource; - -public final class MariaDBSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MariaDBSource model = BinaryData.fromString( - "{\"type\":\"MariaDBSource\",\"query\":\"datajvmllyjelnhmu\",\"queryTimeout\":\"dataxkofzx\",\"additionalColumns\":\"datasleokbama\",\"sourceRetryCount\":\"datawgccgblepamvl\",\"sourceRetryWait\":\"dataxdaoj\",\"maxConcurrentConnections\":\"datalqoxwqlnxvnmrl\",\"disableMetricsCollection\":\"datajzya\",\"\":{\"zp\":\"dataecwnufldzjcpvhj\",\"boxvwtlnv\":\"datahfejgpef\",\"rje\":\"datashtujaqpkupnr\"}}") - .toObject(MariaDBSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MariaDBSource model = new MariaDBSource().withSourceRetryCount("datawgccgblepamvl") - .withSourceRetryWait("dataxdaoj") - .withMaxConcurrentConnections("datalqoxwqlnxvnmrl") - .withDisableMetricsCollection("datajzya") - .withQueryTimeout("dataxkofzx") 
- .withAdditionalColumns("datasleokbama") - .withQuery("datajvmllyjelnhmu"); - model = BinaryData.fromObject(model).toObject(MariaDBSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java deleted file mode 100644 index e9d82f277959..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MariaDBTableDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MariaDBTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MariaDBTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MariaDBTableDataset model = BinaryData.fromString( - 
"{\"type\":\"MariaDBTable\",\"typeProperties\":{\"tableName\":\"dataswarmybwmrotge\"},\"description\":\"yqnipehfwwcb\",\"structure\":\"dataxia\",\"schema\":\"datavjucfjisosfzl\",\"linkedServiceName\":{\"referenceName\":\"raxnfyzguax\",\"parameters\":{\"mihuvrqp\":\"datajixgofqdqws\",\"prpensbmz\":\"dataxdoicqpkntly\",\"bfex\":\"dataritukoym\",\"qfzwanduhduw\":\"dataizzjxwj\"}},\"parameters\":{\"dsf\":{\"type\":\"Int\",\"defaultValue\":\"datatqmri\"},\"razhvchvvoyiogb\":{\"type\":\"Array\",\"defaultValue\":\"datakiumjfgoxedr\"},\"smxtlcappnvc\":{\"type\":\"Bool\",\"defaultValue\":\"datawzruzsoowx\"}},\"annotations\":[\"datapciryomhkdwuw\",\"datadupbkmzkwhjjsqw\",\"dataaefe\",\"datavvkxdbnmc\"],\"folder\":{\"name\":\"ycdzdob\"},\"\":{\"tw\":\"datadyvfxnzpfdfup\",\"lewgsltut\":\"datadpsegivytabvbbk\",\"cqsxwclykcrudek\":\"datauvedwuuqbmen\"}}") - .toObject(MariaDBTableDataset.class); - Assertions.assertEquals("yqnipehfwwcb", model.description()); - Assertions.assertEquals("raxnfyzguax", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("dsf").type()); - Assertions.assertEquals("ycdzdob", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MariaDBTableDataset model = new MariaDBTableDataset().withDescription("yqnipehfwwcb") - .withStructure("dataxia") - .withSchema("datavjucfjisosfzl") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("raxnfyzguax") - .withParameters(mapOf("mihuvrqp", "datajixgofqdqws", "prpensbmz", "dataxdoicqpkntly", "bfex", - "dataritukoym", "qfzwanduhduw", "dataizzjxwj"))) - .withParameters( - mapOf("dsf", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatqmri"), - "razhvchvvoyiogb", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datakiumjfgoxedr"), - "smxtlcappnvc", - new 
ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawzruzsoowx"))) - .withAnnotations(Arrays.asList("datapciryomhkdwuw", "datadupbkmzkwhjjsqw", "dataaefe", "datavvkxdbnmc")) - .withFolder(new DatasetFolder().withName("ycdzdob")) - .withTableName("dataswarmybwmrotge"); - model = BinaryData.fromObject(model).toObject(MariaDBTableDataset.class); - Assertions.assertEquals("yqnipehfwwcb", model.description()); - Assertions.assertEquals("raxnfyzguax", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("dsf").type()); - Assertions.assertEquals("ycdzdob", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java deleted file mode 100644 index 7a52ce5fb34b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MarketoObjectDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MarketoObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MarketoObjectDataset model = BinaryData.fromString( - "{\"type\":\"MarketoObject\",\"typeProperties\":{\"tableName\":\"datatnpkbvzpk\"},\"description\":\"ngvnqdj\",\"structure\":\"databtwgn\",\"schema\":\"dataxuczlhvbqycznrir\",\"linkedServiceName\":{\"referenceName\":\"iiuv\",\"parameters\":{\"cwtvmijccp\":\"dataqkqwucqsdgb\"}},\"parameters\":{\"jmaih\":{\"type\":\"Array\",\"defaultValue\":\"datamyvwp\"},\"vo\":{\"type\":\"Bool\",\"defaultValue\":\"datalbhxjppcbqetfzfp\"}},\"annotations\":[\"datayjwdunjhmprk\"],\"folder\":{\"name\":\"wiuujxsujk\"},\"\":{\"epdp\":\"dataxeegxbnjnc\",\"uyyaescjxna\":\"dataymgbfmd\",\"inzkefkzlxvc\":\"datapyxqbkxdtbfkih\",\"eozlibcbn\":\"datazcg\"}}") - .toObject(MarketoObjectDataset.class); - Assertions.assertEquals("ngvnqdj", model.description()); - Assertions.assertEquals("iiuv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jmaih").type()); - Assertions.assertEquals("wiuujxsujk", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MarketoObjectDataset model = new MarketoObjectDataset().withDescription("ngvnqdj") - .withStructure("databtwgn") - .withSchema("dataxuczlhvbqycznrir") - 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("iiuv") - .withParameters(mapOf("cwtvmijccp", "dataqkqwucqsdgb"))) - .withParameters(mapOf("jmaih", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datamyvwp"), "vo", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datalbhxjppcbqetfzfp"))) - .withAnnotations(Arrays.asList("datayjwdunjhmprk")) - .withFolder(new DatasetFolder().withName("wiuujxsujk")) - .withTableName("datatnpkbvzpk"); - model = BinaryData.fromObject(model).toObject(MarketoObjectDataset.class); - Assertions.assertEquals("ngvnqdj", model.description()); - Assertions.assertEquals("iiuv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jmaih").type()); - Assertions.assertEquals("wiuujxsujk", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java deleted file mode 100644 index a5c1180b7b0c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MarketoSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MarketoSource; - -public final class MarketoSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MarketoSource model = BinaryData.fromString( - "{\"type\":\"MarketoSource\",\"query\":\"datakdmezaunbw\",\"queryTimeout\":\"datatigavzdsnrjhj\",\"additionalColumns\":\"dataloae\",\"sourceRetryCount\":\"datalrdkc\",\"sourceRetryWait\":\"dataujvhuuzbsxhi\",\"maxConcurrentConnections\":\"datacu\",\"disableMetricsCollection\":\"dataaaex\",\"\":{\"qfeavzzpm\":\"datartlnzdk\",\"pfraku\":\"datazisljxphwynim\"}}") - .toObject(MarketoSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MarketoSource model = new MarketoSource().withSourceRetryCount("datalrdkc") - .withSourceRetryWait("dataujvhuuzbsxhi") - .withMaxConcurrentConnections("datacu") - .withDisableMetricsCollection("dataaaex") - .withQueryTimeout("datatigavzdsnrjhj") - .withAdditionalColumns("dataloae") - .withQuery("datakdmezaunbw"); - model = BinaryData.fromObject(model).toObject(MarketoSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java deleted file mode 100644 index 3f7818d70243..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MetadataItemTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MetadataItem; - -public final class MetadataItemTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MetadataItem model = BinaryData.fromString("{\"name\":\"datantoe\",\"value\":\"dataufgdgbzftsbpef\"}") - .toObject(MetadataItem.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MetadataItem model = new MetadataItem().withName("datantoe").withValue("dataufgdgbzftsbpef"); - model = BinaryData.fromObject(model).toObject(MetadataItem.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java deleted file mode 100644 index 86629b58219d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MicrosoftAccessSink; - -public final class MicrosoftAccessSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MicrosoftAccessSink model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessSink\",\"preCopyScript\":\"datayipjzgmxqaupy\",\"writeBatchSize\":\"datagoyp\",\"writeBatchTimeout\":\"dataooyyfysn\",\"sinkRetryCount\":\"datajnl\",\"sinkRetryWait\":\"datacmhonojese\",\"maxConcurrentConnections\":\"dataxel\",\"disableMetricsCollection\":\"dataxwmpziy\",\"\":{\"wpcutzlvx\":\"datajswedkfofyfwpu\",\"vddwgozr\":\"dataolvedzrjkrpor\",\"dyhcwcgvyuuse\":\"dataglkmgcxmkrldfo\"}}") - .toObject(MicrosoftAccessSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MicrosoftAccessSink model = new MicrosoftAccessSink().withWriteBatchSize("datagoyp") - .withWriteBatchTimeout("dataooyyfysn") - .withSinkRetryCount("datajnl") - .withSinkRetryWait("datacmhonojese") - .withMaxConcurrentConnections("dataxel") - .withDisableMetricsCollection("dataxwmpziy") - .withPreCopyScript("datayipjzgmxqaupy"); - model = BinaryData.fromObject(model).toObject(MicrosoftAccessSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java deleted file mode 100644 index 75d5123cedee..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MicrosoftAccessSource; - -public final class MicrosoftAccessSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MicrosoftAccessSource model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessSource\",\"query\":\"dataho\",\"additionalColumns\":\"datarsolhhvmfoej\",\"sourceRetryCount\":\"dataiq\",\"sourceRetryWait\":\"datapelnud\",\"maxConcurrentConnections\":\"dataierxxorsdvui\",\"disableMetricsCollection\":\"datafk\",\"\":{\"vmirybwgaccvw\":\"datalfgmdoaihlvrsqc\"}}") - .toObject(MicrosoftAccessSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MicrosoftAccessSource model = new MicrosoftAccessSource().withSourceRetryCount("dataiq") - .withSourceRetryWait("datapelnud") - .withMaxConcurrentConnections("dataierxxorsdvui") - .withDisableMetricsCollection("datafk") - .withQuery("dataho") - .withAdditionalColumns("datarsolhhvmfoej"); - model = BinaryData.fromObject(model).toObject(MicrosoftAccessSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java deleted file mode 100644 index 546dadcd2498..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MicrosoftAccessTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MicrosoftAccessTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MicrosoftAccessTableDataset model = BinaryData.fromString( - "{\"type\":\"MicrosoftAccessTable\",\"typeProperties\":{\"tableName\":\"datamotahbqsvnk\"},\"description\":\"ytzuaedrl\",\"structure\":\"datagc\",\"schema\":\"datayrhkvxzzmiem\",\"linkedServiceName\":{\"referenceName\":\"yftgpqoswgfqv\",\"parameters\":{\"qyhls\":\"datahpak\",\"jm\":\"datarnfbmeqagkn\",\"mqmbwpp\":\"databnyevztnjawrhule\",\"xjucl\":\"datairxbkitzmnhit\"}},\"parameters\":{\"gdzdvyljubvfj\":{\"type\":\"Bool\",\"defaultValue\":\"datagcem\"},\"difnivlutgg\":{\"type\":\"String\",\"defaultValue\":\"dataf\"},\"oiyygkts\":{\"type\":\"Object\",\"defaultValue\":\"datacxauhvcgzxhklsqx\"}},\"annotations\":[\"dataxxoxwfzbkv\"],\"folder\":{\"name\":\"xfxp\"},\"\":{\"wm\":\"datawbebsnbwutlv\",\"ustihtgrafjajvky\":\"datau\",\"derjennmk\":\"datammjczvog\"}}") - .toObject(MicrosoftAccessTableDataset.class); - Assertions.assertEquals("ytzuaedrl", model.description()); - Assertions.assertEquals("yftgpqoswgfqv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gdzdvyljubvfj").type()); - Assertions.assertEquals("xfxp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
MicrosoftAccessTableDataset model = new MicrosoftAccessTableDataset().withDescription("ytzuaedrl") - .withStructure("datagc") - .withSchema("datayrhkvxzzmiem") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yftgpqoswgfqv") - .withParameters(mapOf("qyhls", "datahpak", "jm", "datarnfbmeqagkn", "mqmbwpp", "databnyevztnjawrhule", - "xjucl", "datairxbkitzmnhit"))) - .withParameters(mapOf("gdzdvyljubvfj", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datagcem"), "difnivlutgg", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataf"), "oiyygkts", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datacxauhvcgzxhklsqx"))) - .withAnnotations(Arrays.asList("dataxxoxwfzbkv")) - .withFolder(new DatasetFolder().withName("xfxp")) - .withTableName("datamotahbqsvnk"); - model = BinaryData.fromObject(model).toObject(MicrosoftAccessTableDataset.class); - Assertions.assertEquals("ytzuaedrl", model.description()); - Assertions.assertEquals("yftgpqoswgfqv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gdzdvyljubvfj").type()); - Assertions.assertEquals("xfxp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java deleted file mode 100644 index fb750f083430..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MicrosoftAccessTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MicrosoftAccessTableDatasetTypeProperties; - -public final class MicrosoftAccessTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MicrosoftAccessTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datae\"}") - .toObject(MicrosoftAccessTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MicrosoftAccessTableDatasetTypeProperties model - = new MicrosoftAccessTableDatasetTypeProperties().withTableName("datae"); - model = BinaryData.fromObject(model).toObject(MicrosoftAccessTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java deleted file mode 100644 index aaa09c353e03..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MongoDbAtlasCollectionDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MongoDbAtlasCollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasCollectionDataset model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlasCollection\",\"typeProperties\":{\"collection\":\"dataidzr\"},\"description\":\"sgloi\",\"structure\":\"dataslvivqsu\",\"schema\":\"dataten\",\"linkedServiceName\":{\"referenceName\":\"gpijpk\",\"parameters\":{\"xukuicjuftekio\":\"dataoa\"}},\"parameters\":{\"zubfjzabbwz\":{\"type\":\"Bool\",\"defaultValue\":\"dataewfhvpxjh\"}},\"annotations\":[\"datauaixcdckix\",\"dataps\",\"dataigavk\",\"datavyxzer\"],\"folder\":{\"name\":\"kpzjbyetjxryopt\"},\"\":{\"bpemnrrabovr\":\"datatwhlbecgih\",\"pskpeswyhhmif\":\"datawxywpjhspboxhif\",\"y\":\"datauajxwwvcmmpeg\"}}") - .toObject(MongoDbAtlasCollectionDataset.class); 
- Assertions.assertEquals("sgloi", model.description()); - Assertions.assertEquals("gpijpk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); - Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasCollectionDataset model = new MongoDbAtlasCollectionDataset().withDescription("sgloi") - .withStructure("dataslvivqsu") - .withSchema("dataten") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gpijpk") - .withParameters(mapOf("xukuicjuftekio", "dataoa"))) - .withParameters(mapOf("zubfjzabbwz", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataewfhvpxjh"))) - .withAnnotations(Arrays.asList("datauaixcdckix", "dataps", "dataigavk", "datavyxzer")) - .withFolder(new DatasetFolder().withName("kpzjbyetjxryopt")) - .withCollection("dataidzr"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasCollectionDataset.class); - Assertions.assertEquals("sgloi", model.description()); - Assertions.assertEquals("gpijpk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zubfjzabbwz").type()); - Assertions.assertEquals("kpzjbyetjxryopt", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 2227a8196e82..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasCollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasCollectionDatasetTypeProperties; - -public final class MongoDbAtlasCollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasCollectionDatasetTypeProperties model = BinaryData.fromString("{\"collection\":\"dataqvcml\"}") - .toObject(MongoDbAtlasCollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasCollectionDatasetTypeProperties model - = new MongoDbAtlasCollectionDatasetTypeProperties().withCollection("dataqvcml"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasCollectionDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java deleted file mode 100644 index a5a1931c5820..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.MongoDbAtlasLinkedService; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MongoDbAtlasLinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasLinkedService model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlas\",\"typeProperties\":{\"connectionString\":\"datazeq\",\"database\":\"datactpqnofkw\",\"driverVersion\":\"datayzwfyfdb\"},\"connectVia\":{\"referenceName\":\"oelmikdsq\",\"parameters\":{\"wjwsmnwbmacv\":\"datagjjsmvsiyqml\"}},\"description\":\"mriyzyvquesxp\",\"parameters\":{\"lxxhcynnmv\":{\"type\":\"SecureString\",\"defaultValue\":\"databul\"},\"pwcidsjqcqyzmrtf\":{\"type\":\"String\",\"defaultValue\":\"datavkwq\"},\"ahwzagvai\":{\"type\":\"Float\",\"defaultValue\":\"datapryyjlikalbcyu\"},\"smrw\":{\"type\":\"Bool\",\"defaultValue\":\"dataephnhnuhgyfzkh\"}},\"annotations\":[\"datai\",\"datarjbpertjpair\"],\"\":{\"tvqopugrse\":\"datamcgiwsywpe\",\"dmcbc\":\"datagiuztqefzypul\"}}") - 
.toObject(MongoDbAtlasLinkedService.class); - Assertions.assertEquals("oelmikdsq", model.connectVia().referenceName()); - Assertions.assertEquals("mriyzyvquesxp", model.description()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("lxxhcynnmv").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasLinkedService model = new MongoDbAtlasLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("oelmikdsq") - .withParameters(mapOf("wjwsmnwbmacv", "datagjjsmvsiyqml"))) - .withDescription("mriyzyvquesxp") - .withParameters(mapOf("lxxhcynnmv", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("databul"), - "pwcidsjqcqyzmrtf", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datavkwq"), "ahwzagvai", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapryyjlikalbcyu"), - "smrw", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataephnhnuhgyfzkh"))) - .withAnnotations(Arrays.asList("datai", "datarjbpertjpair")) - .withConnectionString("datazeq") - .withDatabase("datactpqnofkw") - .withDriverVersion("datayzwfyfdb"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasLinkedService.class); - Assertions.assertEquals("oelmikdsq", model.connectVia().referenceName()); - Assertions.assertEquals("mriyzyvquesxp", model.description()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("lxxhcynnmv").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java deleted file mode 100644 index cd47a0abdc94..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasLinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MongoDbAtlasLinkedServiceTypeProperties; - -public final class MongoDbAtlasLinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"connectionString\":\"datandidhuepikwcxoa\",\"database\":\"datagukqmkiynbfvkiwm\",\"driverVersion\":\"dataw\"}") - .toObject(MongoDbAtlasLinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasLinkedServiceTypeProperties model - = new MongoDbAtlasLinkedServiceTypeProperties().withConnectionString("datandidhuepikwcxoa") - .withDatabase("datagukqmkiynbfvkiwm") - .withDriverVersion("dataw"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasLinkedServiceTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java deleted file mode 100644 index 50af35f6201e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbAtlasSink; - -public final class MongoDbAtlasSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasSink model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlasSink\",\"writeBehavior\":\"datapmathiydmkyvsx\",\"writeBatchSize\":\"dataivghajpddgfozn\",\"writeBatchTimeout\":\"datamkpjoesozcuhunm\",\"sinkRetryCount\":\"databmwptdrrruy\",\"sinkRetryWait\":\"dataoiumuxna\",\"maxConcurrentConnections\":\"datavgmckxh\",\"disableMetricsCollection\":\"datazsmpoiu\",\"\":{\"vo\":\"datatvpbiojncgjog\"}}") - .toObject(MongoDbAtlasSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasSink model = new MongoDbAtlasSink().withWriteBatchSize("dataivghajpddgfozn") - .withWriteBatchTimeout("datamkpjoesozcuhunm") - .withSinkRetryCount("databmwptdrrruy") - .withSinkRetryWait("dataoiumuxna") - .withMaxConcurrentConnections("datavgmckxh") - .withDisableMetricsCollection("datazsmpoiu") - .withWriteBehavior("datapmathiydmkyvsx"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasSink.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java deleted file mode 100644 index 89cc86dfc12a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbAtlasSourceTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbAtlasSource; -import com.azure.resourcemanager.datafactory.models.MongoDbCursorMethodsProperties; -import java.util.HashMap; -import java.util.Map; - -public final class MongoDbAtlasSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbAtlasSource model = BinaryData.fromString( - "{\"type\":\"MongoDbAtlasSource\",\"filter\":\"dataruwizqvgado\",\"cursorMethods\":{\"project\":\"datanglzjhaqxf\",\"sort\":\"datasptce\",\"skip\":\"datavra\",\"limit\":\"datayfhzybjrxen\",\"\":{\"cy\":\"dataanlb\",\"bwxhlqioqh\":\"datawrcjta\",\"yzluilzgpg\":\"dataxcg\"}},\"batchSize\":\"dataakzmnxjnqmajsl\",\"queryTimeout\":\"datajlpbxvp\",\"additionalColumns\":\"dataup\",\"sourceRetryCount\":\"datajrwpoxuy\",\"sourceRetryWait\":\"datayoyjptkyfrkzg\",\"maxConcurrentConnections\":\"datawyqkkd\",\"disableMetricsCollection\":\"dataxdrgim\",\"\":{\"nl\":\"dataffybo\",\"hhgnu\":\"datavfundkhdmyxmsbt\",\"u\":\"datacbjxgjudgbwr\",\"mgsm\":\"datauzlfqhzihlzljqc\"}}") - .toObject(MongoDbAtlasSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbAtlasSource model = 
new MongoDbAtlasSource().withSourceRetryCount("datajrwpoxuy") - .withSourceRetryWait("datayoyjptkyfrkzg") - .withMaxConcurrentConnections("datawyqkkd") - .withDisableMetricsCollection("dataxdrgim") - .withFilter("dataruwizqvgado") - .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("datanglzjhaqxf") - .withSort("datasptce") - .withSkip("datavra") - .withLimit("datayfhzybjrxen") - .withAdditionalProperties(mapOf())) - .withBatchSize("dataakzmnxjnqmajsl") - .withQueryTimeout("datajlpbxvp") - .withAdditionalColumns("dataup"); - model = BinaryData.fromObject(model).toObject(MongoDbAtlasSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java deleted file mode 100644 index c70938d1fce2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MongoDbCollectionDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MongoDbCollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbCollectionDataset model = BinaryData.fromString( - "{\"type\":\"MongoDbCollection\",\"typeProperties\":{\"collectionName\":\"datakgenjqnnpjwkosn\"},\"description\":\"igfoujjcxgdqmr\",\"structure\":\"datan\",\"schema\":\"datawopswnyinxuprrx\",\"linkedServiceName\":{\"referenceName\":\"xwjezbfqployu\",\"parameters\":{\"svjmnsvujnjk\":\"datacpvuft\",\"se\":\"datavolefcj\",\"otfbjampqoclann\":\"datakdb\",\"zstrktgvpatrg\":\"dataxynlsuqb\"}},\"parameters\":{\"fhok\":{\"type\":\"Object\",\"defaultValue\":\"datai\"},\"uookrjfls\":{\"type\":\"SecureString\",\"defaultValue\":\"dataculwkqic\"},\"pqsdoc\":{\"type\":\"Array\",\"defaultValue\":\"datajbt\"},\"cdsgxceluji\":{\"type\":\"Array\",\"defaultValue\":\"datapujzfl\"}},\"annotations\":[\"datalu\",\"datanxhfwlfxzfwu\"],\"folder\":{\"name\":\"pc\"},\"\":{\"gsytqpdzfyxcn\":\"datacexkgrvfpsjdm\",\"qfbifo\":\"dataawoxcgzbe\",\"zqjkrf\":\"datafjxdwdrp\",\"pgayiawohfmhnnzm\":\"databwotfcuuugtji\"}}") - .toObject(MongoDbCollectionDataset.class); - Assertions.assertEquals("igfoujjcxgdqmr", model.description()); - Assertions.assertEquals("xwjezbfqployu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fhok").type()); - 
Assertions.assertEquals("pc", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbCollectionDataset model = new MongoDbCollectionDataset().withDescription("igfoujjcxgdqmr") - .withStructure("datan") - .withSchema("datawopswnyinxuprrx") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("xwjezbfqployu") - .withParameters(mapOf("svjmnsvujnjk", "datacpvuft", "se", "datavolefcj", "otfbjampqoclann", "datakdb", - "zstrktgvpatrg", "dataxynlsuqb"))) - .withParameters(mapOf("fhok", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datai"), "uookrjfls", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataculwkqic"), - "pqsdoc", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datajbt"), - "cdsgxceluji", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datapujzfl"))) - .withAnnotations(Arrays.asList("datalu", "datanxhfwlfxzfwu")) - .withFolder(new DatasetFolder().withName("pc")) - .withCollectionName("datakgenjqnnpjwkosn"); - model = BinaryData.fromObject(model).toObject(MongoDbCollectionDataset.class); - Assertions.assertEquals("igfoujjcxgdqmr", model.description()); - Assertions.assertEquals("xwjezbfqployu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("fhok").type()); - Assertions.assertEquals("pc", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 07ab62a9ac10..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MongoDbCollectionDatasetTypeProperties; - -public final class MongoDbCollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbCollectionDatasetTypeProperties model = BinaryData.fromString("{\"collectionName\":\"datacjjkmqenh\"}") - .toObject(MongoDbCollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbCollectionDatasetTypeProperties model - = new MongoDbCollectionDatasetTypeProperties().withCollectionName("datacjjkmqenh"); - model = BinaryData.fromObject(model).toObject(MongoDbCollectionDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java deleted file mode 100644 index f4e6762d659e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbCursorMethodsPropertiesTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbCursorMethodsProperties; -import java.util.HashMap; -import java.util.Map; - -public final class MongoDbCursorMethodsPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbCursorMethodsProperties model = BinaryData.fromString( - "{\"project\":\"datajunqwkjfmtuybdzr\",\"sort\":\"datackxennzowguirh\",\"skip\":\"datajpw\",\"limit\":\"datamktpykoicpk\",\"\":{\"jaof\":\"dataqfdtbao\",\"rwsj\":\"datacvhhrgvkuuikrsie\",\"vygrfyyknxuacfm\":\"datadxenxjvapdqg\",\"kt\":\"dataynlcimjmurocryfu\"}}") - .toObject(MongoDbCursorMethodsProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbCursorMethodsProperties model = new MongoDbCursorMethodsProperties().withProject("datajunqwkjfmtuybdzr") - .withSort("datackxennzowguirh") - .withSkip("datajpw") - .withLimit("datamktpykoicpk") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(MongoDbCursorMethodsProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java deleted file mode 100644 index 1c6311b70c41..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbSource; - -public final class MongoDbSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbSource model = BinaryData.fromString( - "{\"type\":\"MongoDbSource\",\"query\":\"datagj\",\"additionalColumns\":\"dataxqoydyislepdbsi\",\"sourceRetryCount\":\"datantsp\",\"sourceRetryWait\":\"dataumpyytbjbmjbmtx\",\"maxConcurrentConnections\":\"datawflkgeqotvocj\",\"disableMetricsCollection\":\"dataihnw\",\"\":{\"tfnbvtxqqlb\":\"datarsgfd\",\"a\":\"dataiqbd\",\"lxwsfdd\":\"datajxcdhp\",\"wmjsurhljjzsj\":\"dataqpfynt\"}}") - .toObject(MongoDbSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbSource model = new MongoDbSource().withSourceRetryCount("datantsp") - .withSourceRetryWait("dataumpyytbjbmjbmtx") - .withMaxConcurrentConnections("datawflkgeqotvocj") - .withDisableMetricsCollection("dataihnw") - .withQuery("datagj") - 
.withAdditionalColumns("dataxqoydyislepdbsi"); - model = BinaryData.fromObject(model).toObject(MongoDbSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java deleted file mode 100644 index b708c1b3b80d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MongoDbV2CollectionDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MongoDbV2CollectionDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2CollectionDataset model = BinaryData.fromString( - 
"{\"type\":\"MongoDbV2Collection\",\"typeProperties\":{\"collection\":\"dataexbzbqufpnezsjza\"},\"description\":\"ldrorhyogzmsimeh\",\"structure\":\"datauuwdhtq\",\"schema\":\"datayhn\",\"linkedServiceName\":{\"referenceName\":\"m\",\"parameters\":{\"mwnwnghojovkey\":\"datasugisn\",\"crtnuguefxxi\":\"datamicjixxfsf\"}},\"parameters\":{\"epfgsvbbvaqdl\":{\"type\":\"Array\",\"defaultValue\":\"dataveywetkrhlolmcn\"},\"huekdxljzvdovb\":{\"type\":\"SecureString\",\"defaultValue\":\"dataetlrnrdetawevx\"}},\"annotations\":[\"dataerlpr\",\"dataaqccddcbnyg\"],\"folder\":{\"name\":\"cxwbp\"},\"\":{\"lrzndas\":\"datakdigq\",\"iqlarhqtwvcaze\":\"datanidmjqmvytg\",\"dzffzjwztsmpchg\":\"datad\",\"rrkdknczgor\":\"dataryelgfyatigfg\"}}") - .toObject(MongoDbV2CollectionDataset.class); - Assertions.assertEquals("ldrorhyogzmsimeh", model.description()); - Assertions.assertEquals("m", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("epfgsvbbvaqdl").type()); - Assertions.assertEquals("cxwbp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2CollectionDataset model = new MongoDbV2CollectionDataset().withDescription("ldrorhyogzmsimeh") - .withStructure("datauuwdhtq") - .withSchema("datayhn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("m") - .withParameters(mapOf("mwnwnghojovkey", "datasugisn", "crtnuguefxxi", "datamicjixxfsf"))) - .withParameters(mapOf("epfgsvbbvaqdl", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataveywetkrhlolmcn"), - "huekdxljzvdovb", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataetlrnrdetawevx"))) - .withAnnotations(Arrays.asList("dataerlpr", "dataaqccddcbnyg")) - .withFolder(new DatasetFolder().withName("cxwbp")) - .withCollection("dataexbzbqufpnezsjza"); - model = 
BinaryData.fromObject(model).toObject(MongoDbV2CollectionDataset.class); - Assertions.assertEquals("ldrorhyogzmsimeh", model.description()); - Assertions.assertEquals("m", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("epfgsvbbvaqdl").type()); - Assertions.assertEquals("cxwbp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java deleted file mode 100644 index 8bd4f273239e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2CollectionDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2CollectionDatasetTypeProperties; - -public final class MongoDbV2CollectionDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2CollectionDatasetTypeProperties model = BinaryData.fromString("{\"collection\":\"datawnvojtvmdev\"}") - .toObject(MongoDbV2CollectionDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2CollectionDatasetTypeProperties model - = new MongoDbV2CollectionDatasetTypeProperties().withCollection("datawnvojtvmdev"); - model = BinaryData.fromObject(model).toObject(MongoDbV2CollectionDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java deleted file mode 100644 index b17bad880fe5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTests.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.MongoDbV2LinkedService; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MongoDbV2LinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2LinkedService model = BinaryData.fromString( - "{\"type\":\"MongoDbV2\",\"typeProperties\":{\"connectionString\":\"datay\",\"database\":\"datacct\"},\"connectVia\":{\"referenceName\":\"gy\",\"parameters\":{\"zhdmcg\":\"datax\"}},\"description\":\"brybfarkhko\",\"parameters\":{\"jjhq\":{\"type\":\"Float\",\"defaultValue\":\"datawmoyhd\"}},\"annotations\":[\"databjfpxo\",\"datagnm\",\"dataniqw\"],\"\":{\"sstnwvravntvk\":\"datayxfknj\"}}") - .toObject(MongoDbV2LinkedService.class); - Assertions.assertEquals("gy", model.connectVia().referenceName()); - Assertions.assertEquals("brybfarkhko", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jjhq").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2LinkedService model = new MongoDbV2LinkedService() - .withConnectVia( - new IntegrationRuntimeReference().withReferenceName("gy").withParameters(mapOf("zhdmcg", "datax"))) - .withDescription("brybfarkhko") - .withParameters(mapOf("jjhq", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datawmoyhd"))) - .withAnnotations(Arrays.asList("databjfpxo", "datagnm", "dataniqw")) - .withConnectionString("datay") - .withDatabase("datacct"); - model = 
BinaryData.fromObject(model).toObject(MongoDbV2LinkedService.class); - Assertions.assertEquals("gy", model.connectVia().referenceName()); - Assertions.assertEquals("brybfarkhko", model.description()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("jjhq").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java deleted file mode 100644 index ae9ac5092155..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2LinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MongoDbV2LinkedServiceTypeProperties; - -public final class MongoDbV2LinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2LinkedServiceTypeProperties model - = BinaryData.fromString("{\"connectionString\":\"datakwqi\",\"database\":\"datanlpaymketotk\"}") - .toObject(MongoDbV2LinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2LinkedServiceTypeProperties model - = new MongoDbV2LinkedServiceTypeProperties().withConnectionString("datakwqi") - .withDatabase("datanlpaymketotk"); - model = BinaryData.fromObject(model).toObject(MongoDbV2LinkedServiceTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java deleted file mode 100644 index a34c97f08212..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbV2Sink; - -public final class MongoDbV2SinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2Sink model = BinaryData.fromString( - "{\"type\":\"MongoDbV2Sink\",\"writeBehavior\":\"dataekmgpseassdqpwhp\",\"writeBatchSize\":\"datadosfgbvsozjf\",\"writeBatchTimeout\":\"datawxcjciotlbpuemqe\",\"sinkRetryCount\":\"datao\",\"sinkRetryWait\":\"datavhhedc\",\"maxConcurrentConnections\":\"datalycrldwccas\",\"disableMetricsCollection\":\"databdvsorvhbygw\",\"\":{\"wncggamxbtq\":\"dataqlzzkb\",\"qhbnwmok\":\"datazydaiolnkkghlex\"}}") - .toObject(MongoDbV2Sink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2Sink model = new MongoDbV2Sink().withWriteBatchSize("datadosfgbvsozjf") - .withWriteBatchTimeout("datawxcjciotlbpuemqe") - .withSinkRetryCount("datao") - .withSinkRetryWait("datavhhedc") - .withMaxConcurrentConnections("datalycrldwccas") - .withDisableMetricsCollection("databdvsorvhbygw") - .withWriteBehavior("dataekmgpseassdqpwhp"); - model = BinaryData.fromObject(model).toObject(MongoDbV2Sink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java deleted file mode 100644 index 5ca559c1b765..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MongoDbV2SourceTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MongoDbCursorMethodsProperties; -import com.azure.resourcemanager.datafactory.models.MongoDbV2Source; -import java.util.HashMap; -import java.util.Map; - -public final class MongoDbV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MongoDbV2Source model = BinaryData.fromString( - "{\"type\":\"MongoDbV2Source\",\"filter\":\"datazuzvbqbroyrw\",\"cursorMethods\":{\"project\":\"databfweozkbokffsu\",\"sort\":\"datacslzca\",\"skip\":\"datad\",\"limit\":\"datafwkpupbsgfnqtxl\",\"\":{\"cist\":\"dataviklxsgstunsa\",\"ajkodpz\":\"databehkb\",\"faas\":\"datatgsazwx\",\"cdl\":\"datahasjbuhz\"}},\"batchSize\":\"datajstncjwze\",\"queryTimeout\":\"dataezltlundkjphvh\",\"additionalColumns\":\"dataivsh\",\"sourceRetryCount\":\"datax\",\"sourceRetryWait\":\"datajythxearlpnajjt\",\"maxConcurrentConnections\":\"datalyd\",\"disableMetricsCollection\":\"datauxbungmpnrytguc\",\"\":{\"cjugoa\":\"datagl\",\"uzanpoyrqjoni\":\"datazyzjc\",\"cloq\":\"datanyhzestt\"}}") - .toObject(MongoDbV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MongoDbV2Source model = new MongoDbV2Source().withSourceRetryCount("datax") - .withSourceRetryWait("datajythxearlpnajjt") - .withMaxConcurrentConnections("datalyd") - .withDisableMetricsCollection("datauxbungmpnrytguc") - .withFilter("datazuzvbqbroyrw") - .withCursorMethods(new MongoDbCursorMethodsProperties().withProject("databfweozkbokffsu") - .withSort("datacslzca") - .withSkip("datad") - .withLimit("datafwkpupbsgfnqtxl") - .withAdditionalProperties(mapOf())) - .withBatchSize("datajstncjwze") - .withQueryTimeout("dataezltlundkjphvh") - .withAdditionalColumns("dataivsh"); - model = BinaryData.fromObject(model).toObject(MongoDbV2Source.class); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java deleted file mode 100644 index 824dd752bb50..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MultiplePipelineTriggerTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MultiplePipelineTrigger; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MultiplePipelineTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MultiplePipelineTrigger model = BinaryData.fromString( - 
"{\"type\":\"MultiplePipelineTrigger\",\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"tknpb\",\"name\":\"tkwstumjtg\"},\"parameters\":{\"m\":\"datawpsnldjjgrebbonj\",\"nsmtgza\":\"datafseykprgpqnesu\"}},{\"pipelineReference\":{\"referenceName\":\"whldxbvrytthzs\",\"name\":\"ugzs\"},\"parameters\":{\"s\":\"datakevvqetvcxabzweh\"}},{\"pipelineReference\":{\"referenceName\":\"m\",\"name\":\"rhiaomldtkqoajp\"},\"parameters\":{\"uiroz\":\"datarafhzi\"}}],\"description\":\"sdznnhhjdfyu\",\"runtimeState\":\"Started\",\"annotations\":[\"datambhaumpwgtero\",\"dataenvjouzjk\",\"dataxbraqzrbvogfmp\"],\"\":{\"odrqajxm\":\"datarynlqnklbwyqoy\",\"vjondoaiydj\":\"dataxspbjerokbdk\",\"l\":\"datakdjm\"}}") - .toObject(MultiplePipelineTrigger.class); - Assertions.assertEquals("sdznnhhjdfyu", model.description()); - Assertions.assertEquals("tknpb", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("tkwstumjtg", model.pipelines().get(0).pipelineReference().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MultiplePipelineTrigger model - = new MultiplePipelineTrigger().withDescription("sdznnhhjdfyu") - .withAnnotations(Arrays.asList("datambhaumpwgtero", "dataenvjouzjk", "dataxbraqzrbvogfmp")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("tknpb").withName("tkwstumjtg")) - .withParameters(mapOf("m", "datawpsnldjjgrebbonj", "nsmtgza", "datafseykprgpqnesu")), - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("whldxbvrytthzs").withName("ugzs")) - .withParameters(mapOf("s", "datakevvqetvcxabzweh")), - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("m").withName("rhiaomldtkqoajp")) - .withParameters(mapOf("uiroz", "datarafhzi")))); - model = 
BinaryData.fromObject(model).toObject(MultiplePipelineTrigger.class); - Assertions.assertEquals("sdznnhhjdfyu", model.description()); - Assertions.assertEquals("tknpb", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("tkwstumjtg", model.pipelines().get(0).pipelineReference().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java deleted file mode 100644 index d61a2ff70e8e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MySqlSource; - -public final class MySqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MySqlSource model = BinaryData.fromString( - "{\"type\":\"MySqlSource\",\"query\":\"dataeedjnklvbrsxykwb\",\"queryTimeout\":\"datasd\",\"additionalColumns\":\"datajitlqxpsnnnxhgd\",\"sourceRetryCount\":\"datadxylndbgaicj\",\"sourceRetryWait\":\"datajzfzkh\",\"maxConcurrentConnections\":\"datapnmrxjdfk\",\"disableMetricsCollection\":\"datakaipfyv\",\"\":{\"ah\":\"datasvywkbiekyvak\",\"oxvoaoavezwcl\":\"datapgnapkpaie\",\"vhcbu\":\"datazmlrvlghlrcdiq\",\"jtockgqaawyyszwo\":\"dataawifz\"}}") - .toObject(MySqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MySqlSource model = new MySqlSource().withSourceRetryCount("datadxylndbgaicj") - .withSourceRetryWait("datajzfzkh") - .withMaxConcurrentConnections("datapnmrxjdfk") - .withDisableMetricsCollection("datakaipfyv") - .withQueryTimeout("datasd") - .withAdditionalColumns("datajitlqxpsnnnxhgd") - .withQuery("dataeedjnklvbrsxykwb"); - model = BinaryData.fromObject(model).toObject(MySqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java deleted file mode 100644 index 27fa5fe84fdb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.MySqlTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class MySqlTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MySqlTableDataset model = BinaryData.fromString( - "{\"type\":\"MySqlTable\",\"typeProperties\":{\"tableName\":\"datanqvxgvohd\"},\"description\":\"hhxmoevvud\",\"structure\":\"datapfhga\",\"schema\":\"datavwxqhpjhu\",\"linkedServiceName\":{\"referenceName\":\"ohxvzgaybvrhhoga\",\"parameters\":{\"uebpamq\":\"datanwfmzvzt\",\"eum\":\"datafcssanybzzghvd\",\"hookju\":\"datajsvcdhlyw\"}},\"parameters\":{\"apcqksaaapxjh\":{\"type\":\"Object\",\"defaultValue\":\"datariwgoe\"},\"hl\":{\"type\":\"Bool\",\"defaultValue\":\"dataibenwsdfpvda\"}},\"annotations\":[\"dataqklpmvzp\",\"datareszyaqgom\",\"databmfggeokfekcjjlw\"],\"folder\":{\"name\":\"ahhhutp\"},\"\":{\"nowrerjpx\":\"datarfcqufmcihp\",\"qqidqim\":\"datatchdw\"}}") - .toObject(MySqlTableDataset.class); - Assertions.assertEquals("hhxmoevvud", model.description()); - Assertions.assertEquals("ohxvzgaybvrhhoga", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("apcqksaaapxjh").type()); - Assertions.assertEquals("ahhhutp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MySqlTableDataset model = new MySqlTableDataset().withDescription("hhxmoevvud") - .withStructure("datapfhga") - 
.withSchema("datavwxqhpjhu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ohxvzgaybvrhhoga") - .withParameters( - mapOf("uebpamq", "datanwfmzvzt", "eum", "datafcssanybzzghvd", "hookju", "datajsvcdhlyw"))) - .withParameters(mapOf("apcqksaaapxjh", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datariwgoe"), "hl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataibenwsdfpvda"))) - .withAnnotations(Arrays.asList("dataqklpmvzp", "datareszyaqgom", "databmfggeokfekcjjlw")) - .withFolder(new DatasetFolder().withName("ahhhutp")) - .withTableName("datanqvxgvohd"); - model = BinaryData.fromObject(model).toObject(MySqlTableDataset.class); - Assertions.assertEquals("hhxmoevvud", model.description()); - Assertions.assertEquals("ohxvzgaybvrhhoga", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("apcqksaaapxjh").type()); - Assertions.assertEquals("ahhhutp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java deleted file mode 100644 index e37b256bcf35..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/MySqlTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.MySqlTableDatasetTypeProperties; - -public final class MySqlTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MySqlTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"databbfjmd\"}").toObject(MySqlTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MySqlTableDatasetTypeProperties model = new MySqlTableDatasetTypeProperties().withTableName("databbfjmd"); - model = BinaryData.fromObject(model).toObject(MySqlTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java deleted file mode 100644 index e644e8f03d09..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaPartitionSettingsTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.NetezzaPartitionSettings; - -public final class NetezzaPartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - NetezzaPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"dataqjdoglec\",\"partitionUpperBound\":\"datagyivsiirx\",\"partitionLowerBound\":\"datappqpsiniidaxbesb\"}") - .toObject(NetezzaPartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - NetezzaPartitionSettings model = new NetezzaPartitionSettings().withPartitionColumnName("dataqjdoglec") - .withPartitionUpperBound("datagyivsiirx") - .withPartitionLowerBound("datappqpsiniidaxbesb"); - model = BinaryData.fromObject(model).toObject(NetezzaPartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java deleted file mode 100644 index 921aaf113c67..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaSourceTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.NetezzaPartitionSettings; -import com.azure.resourcemanager.datafactory.models.NetezzaSource; - -public final class NetezzaSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - NetezzaSource model = BinaryData.fromString( - "{\"type\":\"NetezzaSource\",\"query\":\"datarnknnql\",\"partitionOption\":\"datagyeyxmuwgnwxtm\",\"partitionSettings\":{\"partitionColumnName\":\"dataencmos\",\"partitionUpperBound\":\"dataxlgz\",\"partitionLowerBound\":\"dataqxewsvqpifza\"},\"queryTimeout\":\"datatywap\",\"additionalColumns\":\"dataczprzrsqcu\",\"sourceRetryCount\":\"datanp\",\"sourceRetryWait\":\"dataqlanuhmsrnp\",\"maxConcurrentConnections\":\"dataaghoeqiwpdxpd\",\"disableMetricsCollection\":\"dataoajqxyplhsto\",\"\":{\"svpi\":\"databwl\"}}") - .toObject(NetezzaSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - NetezzaSource model = new NetezzaSource().withSourceRetryCount("datanp") - .withSourceRetryWait("dataqlanuhmsrnp") - .withMaxConcurrentConnections("dataaghoeqiwpdxpd") - .withDisableMetricsCollection("dataoajqxyplhsto") - .withQueryTimeout("datatywap") - .withAdditionalColumns("dataczprzrsqcu") - .withQuery("datarnknnql") - .withPartitionOption("datagyeyxmuwgnwxtm") - .withPartitionSettings(new NetezzaPartitionSettings().withPartitionColumnName("dataencmos") - .withPartitionUpperBound("dataxlgz") - .withPartitionLowerBound("dataqxewsvqpifza")); - model = BinaryData.fromObject(model).toObject(NetezzaSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java deleted 
file mode 100644 index 8711dd025269..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.NetezzaTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class NetezzaTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - NetezzaTableDataset model = BinaryData.fromString( - 
"{\"type\":\"NetezzaTable\",\"typeProperties\":{\"tableName\":\"datafvkywzrqeiad\",\"table\":\"datakhuvnl\",\"schema\":\"datacnuti\"},\"description\":\"mizunzbqvioync\",\"structure\":\"dataqhhvvwz\",\"schema\":\"datajaaaiaibtvavly\",\"linkedServiceName\":{\"referenceName\":\"qtlocnwmef\",\"parameters\":{\"bzgy\":\"datauzqcrlkor\",\"nozf\":\"dataenfsfyqncowm\",\"agwaakktbjort\":\"dataywjiaaosla\"}},\"parameters\":{\"zbkd\":{\"type\":\"Bool\",\"defaultValue\":\"dataqhsnsejplislxyl\"},\"rpea\":{\"type\":\"Object\",\"defaultValue\":\"datajwxgvtkjct\"},\"aitrms\":{\"type\":\"Float\",\"defaultValue\":\"datakvfccozvqxspht\"},\"poegyckm\":{\"type\":\"SecureString\",\"defaultValue\":\"datatuytgcptct\"}},\"annotations\":[\"datavrcclclfkfv\",\"dataj\"],\"folder\":{\"name\":\"wrvp\"},\"\":{\"b\":\"datajylxt\",\"aysqwh\":\"datasewfzvv\",\"tcvpvdfmo\":\"datadcyandblkb\",\"sqpffapjpjmsbzz\":\"dataqctfvxu\"}}") - .toObject(NetezzaTableDataset.class); - Assertions.assertEquals("mizunzbqvioync", model.description()); - Assertions.assertEquals("qtlocnwmef", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zbkd").type()); - Assertions.assertEquals("wrvp", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - NetezzaTableDataset model = new NetezzaTableDataset().withDescription("mizunzbqvioync") - .withStructure("dataqhhvvwz") - .withSchema("datajaaaiaibtvavly") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qtlocnwmef") - .withParameters( - mapOf("bzgy", "datauzqcrlkor", "nozf", "dataenfsfyqncowm", "agwaakktbjort", "dataywjiaaosla"))) - .withParameters(mapOf("zbkd", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataqhsnsejplislxyl"), - "rpea", new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datajwxgvtkjct"), - "aitrms", - new 
ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakvfccozvqxspht"), - "poegyckm", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datatuytgcptct"))) - .withAnnotations(Arrays.asList("datavrcclclfkfv", "dataj")) - .withFolder(new DatasetFolder().withName("wrvp")) - .withTableName("datafvkywzrqeiad") - .withTable("datakhuvnl") - .withSchemaTypePropertiesSchema("datacnuti"); - model = BinaryData.fromObject(model).toObject(NetezzaTableDataset.class); - Assertions.assertEquals("mizunzbqvioync", model.description()); - Assertions.assertEquals("qtlocnwmef", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("zbkd").type()); - Assertions.assertEquals("wrvp", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java deleted file mode 100644 index 04fc2dd29402..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NetezzaTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.NetezzaTableDatasetTypeProperties; - -public final class NetezzaTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - NetezzaTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datanyfowyj\",\"table\":\"dataakkiub\",\"schema\":\"datakittlrgl\"}") - .toObject(NetezzaTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - NetezzaTableDatasetTypeProperties model = new NetezzaTableDatasetTypeProperties().withTableName("datanyfowyj") - .withTable("dataakkiub") - .withSchema("datakittlrgl"); - model = BinaryData.fromObject(model).toObject(NetezzaTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java deleted file mode 100644 index 4059fb3e4abf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/NotebookParameterTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.NotebookParameter; -import com.azure.resourcemanager.datafactory.models.NotebookParameterType; -import org.junit.jupiter.api.Assertions; - -public final class NotebookParameterTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - NotebookParameter model - = BinaryData.fromString("{\"value\":\"dataxkdiwpa\",\"type\":\"int\"}").toObject(NotebookParameter.class); - Assertions.assertEquals(NotebookParameterType.INT, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - NotebookParameter model = new NotebookParameter().withValue("dataxkdiwpa").withType(NotebookParameterType.INT); - model = BinaryData.fromObject(model).toObject(NotebookParameter.class); - Assertions.assertEquals(NotebookParameterType.INT, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java deleted file mode 100644 index bac49da8898f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ODataResourceDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ODataResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ODataResourceDataset model = BinaryData.fromString( - "{\"type\":\"ODataResource\",\"typeProperties\":{\"path\":\"dataep\"},\"description\":\"hju\",\"structure\":\"datadgbggcjxzh\",\"schema\":\"dataivwehsudym\",\"linkedServiceName\":{\"referenceName\":\"mbhdo\",\"parameters\":{\"xexatmdmnrs\":\"datangkqlgxzduvxd\"}},\"parameters\":{\"xznntwgkvyohp\":{\"type\":\"String\",\"defaultValue\":\"datarxyddmiploisjkzs\"}},\"annotations\":[\"datazupzwwyt\",\"datadjzghximkg\"],\"folder\":{\"name\":\"pqkjnpyriwntotcx\"},\"\":{\"exwdonbexf\":\"datamtsjkyj\",\"eeggzgrn\":\"dataedaub\"}}") - .toObject(ODataResourceDataset.class); - Assertions.assertEquals("hju", model.description()); - Assertions.assertEquals("mbhdo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("xznntwgkvyohp").type()); - Assertions.assertEquals("pqkjnpyriwntotcx", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ODataResourceDataset model = new ODataResourceDataset().withDescription("hju") - .withStructure("datadgbggcjxzh") - .withSchema("dataivwehsudym") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mbhdo") - .withParameters(mapOf("xexatmdmnrs", 
"datangkqlgxzduvxd"))) - .withParameters(mapOf("xznntwgkvyohp", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datarxyddmiploisjkzs"))) - .withAnnotations(Arrays.asList("datazupzwwyt", "datadjzghximkg")) - .withFolder(new DatasetFolder().withName("pqkjnpyriwntotcx")) - .withPath("dataep"); - model = BinaryData.fromObject(model).toObject(ODataResourceDataset.class); - Assertions.assertEquals("hju", model.description()); - Assertions.assertEquals("mbhdo", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("xznntwgkvyohp").type()); - Assertions.assertEquals("pqkjnpyriwntotcx", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java deleted file mode 100644 index ef80fca80478..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ODataResourceDatasetTypeProperties; - -public final class ODataResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ODataResourceDatasetTypeProperties model - = BinaryData.fromString("{\"path\":\"datalfozuu\"}").toObject(ODataResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ODataResourceDatasetTypeProperties model = new ODataResourceDatasetTypeProperties().withPath("datalfozuu"); - model = BinaryData.fromObject(model).toObject(ODataResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java deleted file mode 100644 index ec154ee31ec7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ODataSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ODataSource; - -public final class ODataSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ODataSource model = BinaryData.fromString( - "{\"type\":\"ODataSource\",\"query\":\"datawmtsmeaciy\",\"httpRequestTimeout\":\"datahnrgmg\",\"additionalColumns\":\"datacusvidkzbdbvlsn\",\"sourceRetryCount\":\"dataymrfomlh\",\"sourceRetryWait\":\"dataiktecs\",\"maxConcurrentConnections\":\"datacqweydaa\",\"disableMetricsCollection\":\"datattmfcx\",\"\":{\"afqiwlduot\":\"datafsqjxxb\",\"kfzpr\":\"datayjzdas\",\"lbddlnzmff\":\"dataqomuzohnpkof\"}}") - .toObject(ODataSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ODataSource model = new ODataSource().withSourceRetryCount("dataymrfomlh") - .withSourceRetryWait("dataiktecs") - .withMaxConcurrentConnections("datacqweydaa") - .withDisableMetricsCollection("datattmfcx") - .withQuery("datawmtsmeaciy") - .withHttpRequestTimeout("datahnrgmg") - .withAdditionalColumns("datacusvidkzbdbvlsn"); - model = BinaryData.fromObject(model).toObject(ODataSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java deleted file mode 100644 index 0b0959fc06ab..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OdbcSink; - -public final class OdbcSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OdbcSink model = BinaryData.fromString( - "{\"type\":\"OdbcSink\",\"preCopyScript\":\"datavnexnw\",\"writeBatchSize\":\"datawcxaqlym\",\"writeBatchTimeout\":\"datazv\",\"sinkRetryCount\":\"datatecfyusfkcwfpo\",\"sinkRetryWait\":\"datalgkzgzxqwv\",\"maxConcurrentConnections\":\"datakqbgkssygdvll\",\"disableMetricsCollection\":\"datadfulvhpwpsxygrn\",\"\":{\"qgn\":\"dataxpsebazbtyrjr\",\"imtu\":\"datafzrra\",\"kve\":\"datajqzgyymyywhfdkj\",\"mccqljrnveq\":\"datasoztzdhzkbmzldpl\"}}") - .toObject(OdbcSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OdbcSink model = new OdbcSink().withWriteBatchSize("datawcxaqlym") - .withWriteBatchTimeout("datazv") - .withSinkRetryCount("datatecfyusfkcwfpo") - .withSinkRetryWait("datalgkzgzxqwv") - .withMaxConcurrentConnections("datakqbgkssygdvll") - .withDisableMetricsCollection("datadfulvhpwpsxygrn") - .withPreCopyScript("datavnexnw"); - model = BinaryData.fromObject(model).toObject(OdbcSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java deleted file mode 100644 index 4a25d5bc835a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OdbcSource; - -public final class OdbcSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OdbcSource model = BinaryData.fromString( - "{\"type\":\"OdbcSource\",\"query\":\"dataemzdnvnooklgren\",\"queryTimeout\":\"datasgertxicemgsncbb\",\"additionalColumns\":\"datakphaed\",\"sourceRetryCount\":\"datalbnu\",\"sourceRetryWait\":\"dataxswmccykbvia\",\"maxConcurrentConnections\":\"datapxlxbofdchb\",\"disableMetricsCollection\":\"datacfskzwese\",\"\":{\"ojw\":\"dataqj\",\"gvsljk\":\"dataixtrnakytzcm\",\"hzsx\":\"dataarqhpxw\",\"vbu\":\"datamgvygmtyw\"}}") - .toObject(OdbcSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OdbcSource model = new OdbcSource().withSourceRetryCount("datalbnu") - .withSourceRetryWait("dataxswmccykbvia") - .withMaxConcurrentConnections("datapxlxbofdchb") - .withDisableMetricsCollection("datacfskzwese") - .withQueryTimeout("datasgertxicemgsncbb") - .withAdditionalColumns("datakphaed") - .withQuery("dataemzdnvnooklgren"); - model = BinaryData.fromObject(model).toObject(OdbcSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java deleted file mode 100644 index e5b937510e76..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.OdbcTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class OdbcTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OdbcTableDataset model = BinaryData.fromString( - "{\"type\":\"OdbcTable\",\"typeProperties\":{\"tableName\":\"dataygzkztxfexwacyy\"},\"description\":\"lxppd\",\"structure\":\"datazkf\",\"schema\":\"datauiiu\",\"linkedServiceName\":{\"referenceName\":\"ibfkcjytq\",\"parameters\":{\"qqfopvno\":\"dataz\"}},\"parameters\":{\"zyfbkmvldzmxojz\":{\"type\":\"Float\",\"defaultValue\":\"datasfhoxqlyo\"},\"vhyejthgeecbp\":{\"type\":\"String\",\"defaultValue\":\"dataaigbq\"},\"kknpugzjw\":{\"type\":\"Int\",\"defaultValue\":\"datawlckihbamyqs\"}},\"annotations\":[\"datacrjixiujzkcs\",\"dataxgkrswksykkbxk\",\"dataxbbwlmnwz\"],\"folder\":{\"name\":\"vuqddlgg\"},\"\":{\"gcmfnsffetpkmixw\":\"dataolgzubakdlkv\"}}") - .toObject(OdbcTableDataset.class); - Assertions.assertEquals("lxppd", model.description()); - Assertions.assertEquals("ibfkcjytq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("zyfbkmvldzmxojz").type()); - Assertions.assertEquals("vuqddlgg", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OdbcTableDataset model = new OdbcTableDataset().withDescription("lxppd") - .withStructure("datazkf") - .withSchema("datauiiu") - .withLinkedServiceName( - 
new LinkedServiceReference().withReferenceName("ibfkcjytq").withParameters(mapOf("qqfopvno", "dataz"))) - .withParameters(mapOf("zyfbkmvldzmxojz", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datasfhoxqlyo"), - "vhyejthgeecbp", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataaigbq"), "kknpugzjw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datawlckihbamyqs"))) - .withAnnotations(Arrays.asList("datacrjixiujzkcs", "dataxgkrswksykkbxk", "dataxbbwlmnwz")) - .withFolder(new DatasetFolder().withName("vuqddlgg")) - .withTableName("dataygzkztxfexwacyy"); - model = BinaryData.fromObject(model).toObject(OdbcTableDataset.class); - Assertions.assertEquals("lxppd", model.description()); - Assertions.assertEquals("ibfkcjytq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("zyfbkmvldzmxojz").type()); - Assertions.assertEquals("vuqddlgg", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java deleted file mode 100644 index 1cac81b63d36..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OdbcTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.OdbcTableDatasetTypeProperties; - -public final class OdbcTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OdbcTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datazlscgs\"}").toObject(OdbcTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OdbcTableDatasetTypeProperties model = new OdbcTableDatasetTypeProperties().withTableName("datazlscgs"); - model = BinaryData.fromObject(model).toObject(OdbcTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java deleted file mode 100644 index 1f1763c92bb2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.Office365Dataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class Office365DatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Office365Dataset model = BinaryData.fromString( - "{\"type\":\"Office365Table\",\"typeProperties\":{\"tableName\":\"dataefvulblmr\",\"predicate\":\"dataxyprhfcaeooifqdy\"},\"description\":\"flobhahq\",\"structure\":\"datamfecorkfrocgbmxl\",\"schema\":\"datamzezbjesylslu\",\"linkedServiceName\":{\"referenceName\":\"bqfy\",\"parameters\":{\"gdz\":\"datay\"}},\"parameters\":{\"hmgw\":{\"type\":\"Int\",\"defaultValue\":\"datagqyvouprsytqzss\"}},\"annotations\":[\"datavrxpfdu\",\"dataolhg\",\"dataqvpbfjpoqzucfzl\"],\"folder\":{\"name\":\"dquur\"},\"\":{\"elbprn\":\"datamvhvz\",\"svhbngqiwyejto\":\"dataujywzcqyggmn\"}}") - .toObject(Office365Dataset.class); - Assertions.assertEquals("flobhahq", model.description()); - Assertions.assertEquals("bqfy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hmgw").type()); - Assertions.assertEquals("dquur", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Office365Dataset model = new Office365Dataset().withDescription("flobhahq") - .withStructure("datamfecorkfrocgbmxl") - .withSchema("datamzezbjesylslu") - .withLinkedServiceName( - new 
LinkedServiceReference().withReferenceName("bqfy").withParameters(mapOf("gdz", "datay"))) - .withParameters(mapOf("hmgw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datagqyvouprsytqzss"))) - .withAnnotations(Arrays.asList("datavrxpfdu", "dataolhg", "dataqvpbfjpoqzucfzl")) - .withFolder(new DatasetFolder().withName("dquur")) - .withTableName("dataefvulblmr") - .withPredicate("dataxyprhfcaeooifqdy"); - model = BinaryData.fromObject(model).toObject(Office365Dataset.class); - Assertions.assertEquals("flobhahq", model.description()); - Assertions.assertEquals("bqfy", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("hmgw").type()); - Assertions.assertEquals("dquur", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java deleted file mode 100644 index 87dfe23e6d09..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365DatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.Office365DatasetTypeProperties; - -public final class Office365DatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Office365DatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datadrrp\",\"predicate\":\"dataehkuns\"}") - .toObject(Office365DatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Office365DatasetTypeProperties model - = new Office365DatasetTypeProperties().withTableName("datadrrp").withPredicate("dataehkuns"); - model = BinaryData.fromObject(model).toObject(Office365DatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java deleted file mode 100644 index b0ce95aa2fbc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/Office365SourceTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Office365Source; - -public final class Office365SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Office365Source model = BinaryData.fromString( - "{\"type\":\"Office365Source\",\"allowedGroups\":\"datadhdtt\",\"userScopeFilterUri\":\"datakeculxvkuxvccpda\",\"dateFilterColumn\":\"dataasi\",\"startTime\":\"datatyvvgxe\",\"endTime\":\"dataqoswjwbh\",\"outputColumns\":\"datawbchybne\",\"sourceRetryCount\":\"dataueikadhusg\",\"sourceRetryWait\":\"databgljcy\",\"maxConcurrentConnections\":\"datarzxipxhlxxkviyj\",\"disableMetricsCollection\":\"dataqyejyavxgm\",\"\":{\"h\":\"datanwxkqqxpnjqtzd\",\"eggyqlvnhmuut\":\"dataethnl\",\"gcbjclfbpfdsatr\":\"datawwtymbccmwsyfsgi\"}}") - .toObject(Office365Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Office365Source model = new Office365Source().withSourceRetryCount("dataueikadhusg") - .withSourceRetryWait("databgljcy") - .withMaxConcurrentConnections("datarzxipxhlxxkviyj") - .withDisableMetricsCollection("dataqyejyavxgm") - .withAllowedGroups("datadhdtt") - .withUserScopeFilterUri("datakeculxvkuxvccpda") - .withDateFilterColumn("dataasi") - .withStartTime("datatyvvgxe") - .withEndTime("dataqoswjwbh") - .withOutputColumns("datawbchybne"); - model = BinaryData.fromObject(model).toObject(Office365Source.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java deleted file mode 100644 index 6bee94c69be2..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationDisplayTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationDisplay; -import org.junit.jupiter.api.Assertions; - -public final class OperationDisplayTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationDisplay model = BinaryData - .fromString( - "{\"description\":\"xinpmqnjaq\",\"provider\":\"xj\",\"resource\":\"r\",\"operation\":\"vcputegj\"}") - .toObject(OperationDisplay.class); - Assertions.assertEquals("xinpmqnjaq", model.description()); - Assertions.assertEquals("xj", model.provider()); - Assertions.assertEquals("r", model.resource()); - Assertions.assertEquals("vcputegj", model.operation()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationDisplay model = new OperationDisplay().withDescription("xinpmqnjaq") - .withProvider("xj") - .withResource("r") - .withOperation("vcputegj"); - model = BinaryData.fromObject(model).toObject(OperationDisplay.class); - Assertions.assertEquals("xinpmqnjaq", model.description()); - Assertions.assertEquals("xj", model.provider()); - Assertions.assertEquals("r", model.resource()); - Assertions.assertEquals("vcputegj", model.operation()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java deleted file mode 100644 index 374710606f40..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationInnerTests.java +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.OperationInner; -import com.azure.resourcemanager.datafactory.models.OperationDisplay; -import com.azure.resourcemanager.datafactory.models.OperationLogSpecification; -import com.azure.resourcemanager.datafactory.models.OperationMetricAvailability; -import com.azure.resourcemanager.datafactory.models.OperationMetricDimension; -import com.azure.resourcemanager.datafactory.models.OperationMetricSpecification; -import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationInner model = BinaryData.fromString( - 
"{\"name\":\"itjz\",\"origin\":\"lusarh\",\"display\":{\"description\":\"cqhsm\",\"provider\":\"rkdtmlxh\",\"resource\":\"uksjtxukcdmp\",\"operation\":\"cryuan\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{\"name\":\"xtayr\",\"displayName\":\"m\",\"blobDuration\":\"fp\"},{\"name\":\"qobmtukk\",\"displayName\":\"yrtih\",\"blobDuration\":\"tijbpzvgnwzsymgl\"},{\"name\":\"fcyzkohdbihanufh\",\"displayName\":\"bj\",\"blobDuration\":\"a\"},{\"name\":\"th\",\"displayName\":\"hab\",\"blobDuration\":\"pikxwczbyscnpqxu\"}],\"metricSpecifications\":[{\"name\":\"qniwbybrkxvdumj\",\"displayName\":\"tfwvukxgaudc\",\"displayDescription\":\"nhsjcnyej\",\"unit\":\"ryhtnapczwlokjy\",\"aggregationType\":\"kkvnipjox\",\"enableRegionalMdmAccount\":\"nchgej\",\"sourceMdmAccount\":\"odmailzyd\",\"sourceMdmNamespace\":\"o\",\"availabilities\":[{},{}],\"dimensions\":[{},{},{}]}]}}}") - .toObject(OperationInner.class); - Assertions.assertEquals("itjz", model.name()); - Assertions.assertEquals("lusarh", model.origin()); - Assertions.assertEquals("cqhsm", model.display().description()); - Assertions.assertEquals("rkdtmlxh", model.display().provider()); - Assertions.assertEquals("uksjtxukcdmp", model.display().resource()); - Assertions.assertEquals("cryuan", model.display().operation()); - Assertions.assertEquals("xtayr", model.serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("m", model.serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("fp", model.serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("qniwbybrkxvdumj", model.serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("tfwvukxgaudc", - model.serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("nhsjcnyej", - model.serviceSpecification().metricSpecifications().get(0).displayDescription()); - 
Assertions.assertEquals("ryhtnapczwlokjy", model.serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("kkvnipjox", - model.serviceSpecification().metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("nchgej", - model.serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("odmailzyd", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("o", model.serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationInner model = new OperationInner().withName("itjz") - .withOrigin("lusarh") - .withDisplay(new OperationDisplay().withDescription("cqhsm") - .withProvider("rkdtmlxh") - .withResource("uksjtxukcdmp") - .withOperation("cryuan")) - .withServiceSpecification(new OperationServiceSpecification() - .withLogSpecifications(Arrays.asList( - new OperationLogSpecification().withName("xtayr").withDisplayName("m").withBlobDuration("fp"), - new OperationLogSpecification().withName("qobmtukk") - .withDisplayName("yrtih") - .withBlobDuration("tijbpzvgnwzsymgl"), - new OperationLogSpecification().withName("fcyzkohdbihanufh") - .withDisplayName("bj") - .withBlobDuration("a"), - new OperationLogSpecification().withName("th") - .withDisplayName("hab") - .withBlobDuration("pikxwczbyscnpqxu"))) - .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification().withName("qniwbybrkxvdumj") - .withDisplayName("tfwvukxgaudc") - .withDisplayDescription("nhsjcnyej") - .withUnit("ryhtnapczwlokjy") - .withAggregationType("kkvnipjox") - .withEnableRegionalMdmAccount("nchgej") - .withSourceMdmAccount("odmailzyd") - .withSourceMdmNamespace("o") - .withAvailabilities( - Arrays.asList(new OperationMetricAvailability(), new OperationMetricAvailability())) - .withDimensions(Arrays.asList(new OperationMetricDimension(), 
new OperationMetricDimension(), - new OperationMetricDimension()))))); - model = BinaryData.fromObject(model).toObject(OperationInner.class); - Assertions.assertEquals("itjz", model.name()); - Assertions.assertEquals("lusarh", model.origin()); - Assertions.assertEquals("cqhsm", model.display().description()); - Assertions.assertEquals("rkdtmlxh", model.display().provider()); - Assertions.assertEquals("uksjtxukcdmp", model.display().resource()); - Assertions.assertEquals("cryuan", model.display().operation()); - Assertions.assertEquals("xtayr", model.serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("m", model.serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("fp", model.serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("qniwbybrkxvdumj", model.serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("tfwvukxgaudc", - model.serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("nhsjcnyej", - model.serviceSpecification().metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("ryhtnapczwlokjy", model.serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("kkvnipjox", - model.serviceSpecification().metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("nchgej", - model.serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("odmailzyd", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("o", model.serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java deleted file mode 100644 index 12a2758f83c1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationListResponseTests.java +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.OperationInner; -import com.azure.resourcemanager.datafactory.models.OperationDisplay; -import com.azure.resourcemanager.datafactory.models.OperationListResponse; -import com.azure.resourcemanager.datafactory.models.OperationLogSpecification; -import com.azure.resourcemanager.datafactory.models.OperationMetricSpecification; -import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"name\":\"quvgjxpybczme\",\"origin\":\"tzopbsphrupidgsy\",\"display\":{\"description\":\"jhphoyc\",\"provider\":\"xaobhdxbmtqioqjz\",\"resource\":\"tbmufpo\",\"operation\":\"oizh\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{},{},{}],\"metricSpecifications\":[{},{},{}]}}},{\"name\":\"oqijgkdmbpaz\",\"origin\":\"bc\",\"display\":{\"description\":\"dznrbtcqq\",\"provider\":\"qglhq\",\"resource\":\"ufo\",\"operation\":\"jywif\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{},{}],\"metricSpecifications\":[{}]}}},{\"name\":\"f\",\"origin\":\"lzl\",\"display\":{\"description\":\"rifkwm\",\"provider\":\"ktsizntocipaou\",\"resource\":\"psqucmpoyf\",\"operation\":\"fogknygjofjdde\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{},{},{},{}],\"metricSpecifications\":[{},{}]}}}],\"nextLink\":\"wnw\"}") - .toObject(OperationListResponse.class); - Assertions.assertEquals("quvgjxpybczme", model.value().get(0).name()); - Assertions.assertEquals("tzopbsphrupidgsy", model.value().get(0).origin()); - Assertions.assertEquals("jhphoyc", model.value().get(0).display().description()); - Assertions.assertEquals("xaobhdxbmtqioqjz", model.value().get(0).display().provider()); - Assertions.assertEquals("tbmufpo", model.value().get(0).display().resource()); - Assertions.assertEquals("oizh", model.value().get(0).display().operation()); - Assertions.assertEquals("wnw", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationListResponse model = new OperationListResponse() - .withValue(Arrays.asList( - new OperationInner().withName("quvgjxpybczme") - .withOrigin("tzopbsphrupidgsy") - .withDisplay(new OperationDisplay().withDescription("jhphoyc") - .withProvider("xaobhdxbmtqioqjz") - .withResource("tbmufpo") - .withOperation("oizh")) - .withServiceSpecification(new OperationServiceSpecification() - .withLogSpecifications(Arrays.asList(new 
OperationLogSpecification(), - new OperationLogSpecification(), new OperationLogSpecification())) - .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification(), - new OperationMetricSpecification(), new OperationMetricSpecification()))), - new OperationInner().withName("oqijgkdmbpaz") - .withOrigin("bc") - .withDisplay(new OperationDisplay().withDescription("dznrbtcqq") - .withProvider("qglhq") - .withResource("ufo") - .withOperation("jywif")) - .withServiceSpecification(new OperationServiceSpecification() - .withLogSpecifications( - Arrays.asList(new OperationLogSpecification(), new OperationLogSpecification())) - .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification()))), - new OperationInner().withName("f") - .withOrigin("lzl") - .withDisplay(new OperationDisplay().withDescription("rifkwm") - .withProvider("ktsizntocipaou") - .withResource("psqucmpoyf") - .withOperation("fogknygjofjdde")) - .withServiceSpecification(new OperationServiceSpecification() - .withLogSpecifications( - Arrays.asList(new OperationLogSpecification(), new OperationLogSpecification(), - new OperationLogSpecification(), new OperationLogSpecification())) - .withMetricSpecifications( - Arrays.asList(new OperationMetricSpecification(), new OperationMetricSpecification()))))) - .withNextLink("wnw"); - model = BinaryData.fromObject(model).toObject(OperationListResponse.class); - Assertions.assertEquals("quvgjxpybczme", model.value().get(0).name()); - Assertions.assertEquals("tzopbsphrupidgsy", model.value().get(0).origin()); - Assertions.assertEquals("jhphoyc", model.value().get(0).display().description()); - Assertions.assertEquals("xaobhdxbmtqioqjz", model.value().get(0).display().provider()); - Assertions.assertEquals("tbmufpo", model.value().get(0).display().resource()); - Assertions.assertEquals("oizh", model.value().get(0).display().operation()); - Assertions.assertEquals("wnw", model.nextLink()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java deleted file mode 100644 index c14a0858e383..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationLogSpecificationTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationLogSpecification; -import org.junit.jupiter.api.Assertions; - -public final class OperationLogSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationLogSpecification model = BinaryData - .fromString("{\"name\":\"ifiyipjxsqwpgrj\",\"displayName\":\"norcjxvsnbyxqab\",\"blobDuration\":\"ocpcy\"}") - .toObject(OperationLogSpecification.class); - Assertions.assertEquals("ifiyipjxsqwpgrj", model.name()); - Assertions.assertEquals("norcjxvsnbyxqab", model.displayName()); - Assertions.assertEquals("ocpcy", model.blobDuration()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationLogSpecification model = new OperationLogSpecification().withName("ifiyipjxsqwpgrj") - .withDisplayName("norcjxvsnbyxqab") - .withBlobDuration("ocpcy"); - model = BinaryData.fromObject(model).toObject(OperationLogSpecification.class); - Assertions.assertEquals("ifiyipjxsqwpgrj", model.name()); - Assertions.assertEquals("norcjxvsnbyxqab", model.displayName()); - Assertions.assertEquals("ocpcy", model.blobDuration()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricAvailabilityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricAvailabilityTests.java deleted file mode 100644 index 90b0d7d1c715..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricAvailabilityTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationMetricAvailability; -import org.junit.jupiter.api.Assertions; - -public final class OperationMetricAvailabilityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationMetricAvailability model - = BinaryData.fromString("{\"timeGrain\":\"ddntwndei\",\"blobDuration\":\"twnpzaoqvuhrhcf\"}") - .toObject(OperationMetricAvailability.class); - Assertions.assertEquals("ddntwndei", model.timeGrain()); - Assertions.assertEquals("twnpzaoqvuhrhcf", model.blobDuration()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationMetricAvailability model - = new OperationMetricAvailability().withTimeGrain("ddntwndei").withBlobDuration("twnpzaoqvuhrhcf"); - model = BinaryData.fromObject(model).toObject(OperationMetricAvailability.class); - Assertions.assertEquals("ddntwndei", model.timeGrain()); - Assertions.assertEquals("twnpzaoqvuhrhcf", model.blobDuration()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java deleted file mode 100644 index ff1b280faca1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricDimensionTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationMetricDimension; -import org.junit.jupiter.api.Assertions; - -public final class OperationMetricDimensionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationMetricDimension model - = BinaryData.fromString("{\"name\":\"yd\",\"displayName\":\"lmjthjq\",\"toBeExportedForShoebox\":false}") - .toObject(OperationMetricDimension.class); - Assertions.assertEquals("yd", model.name()); - Assertions.assertEquals("lmjthjq", model.displayName()); - Assertions.assertEquals(false, model.toBeExportedForShoebox()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationMetricDimension model = new OperationMetricDimension().withName("yd") - .withDisplayName("lmjthjq") - .withToBeExportedForShoebox(false); - model = BinaryData.fromObject(model).toObject(OperationMetricDimension.class); - Assertions.assertEquals("yd", model.name()); - Assertions.assertEquals("lmjthjq", model.displayName()); - Assertions.assertEquals(false, model.toBeExportedForShoebox()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java deleted file mode 100644 index 24b8fd36a913..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationMetricSpecificationTests.java +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationMetricAvailability; -import com.azure.resourcemanager.datafactory.models.OperationMetricDimension; -import com.azure.resourcemanager.datafactory.models.OperationMetricSpecification; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationMetricSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationMetricSpecification model = BinaryData.fromString( - "{\"name\":\"urzafb\",\"displayName\":\"j\",\"displayDescription\":\"btoqcjmkljavbqid\",\"unit\":\"ajzyul\",\"aggregationType\":\"u\",\"enableRegionalMdmAccount\":\"krlkhbzhfepg\",\"sourceMdmAccount\":\"qex\",\"sourceMdmNamespace\":\"ocxscpaierhhbcs\",\"availabilities\":[{\"timeGrain\":\"majtjaod\",\"blobDuration\":\"bnbdxkqpxokajion\"},{\"timeGrain\":\"mexgstxgcp\",\"blobDuration\":\"gmaajrm\"},{\"timeGrain\":\"jwzrl\",\"blobDuration\":\"mcl\"}],\"dimensions\":[{\"name\":\"coejctbzaqs\",\"displayName\":\"y\",\"toBeExportedForShoebox\":false},{\"name\":\"fkgukdkexxppof\",\"displayName\":\"axcfjpgddtocjjx\",\"toBeExportedForShoebox\":true},{\"name\":\"o\",\"displayName\":\"xhdzxibqeojnx\",\"toBeExportedForShoebox\":true}]}") - .toObject(OperationMetricSpecification.class); - Assertions.assertEquals("urzafb", 
model.name()); - Assertions.assertEquals("j", model.displayName()); - Assertions.assertEquals("btoqcjmkljavbqid", model.displayDescription()); - Assertions.assertEquals("ajzyul", model.unit()); - Assertions.assertEquals("u", model.aggregationType()); - Assertions.assertEquals("krlkhbzhfepg", model.enableRegionalMdmAccount()); - Assertions.assertEquals("qex", model.sourceMdmAccount()); - Assertions.assertEquals("ocxscpaierhhbcs", model.sourceMdmNamespace()); - Assertions.assertEquals("majtjaod", model.availabilities().get(0).timeGrain()); - Assertions.assertEquals("bnbdxkqpxokajion", model.availabilities().get(0).blobDuration()); - Assertions.assertEquals("coejctbzaqs", model.dimensions().get(0).name()); - Assertions.assertEquals("y", model.dimensions().get(0).displayName()); - Assertions.assertEquals(false, model.dimensions().get(0).toBeExportedForShoebox()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationMetricSpecification model = new OperationMetricSpecification().withName("urzafb") - .withDisplayName("j") - .withDisplayDescription("btoqcjmkljavbqid") - .withUnit("ajzyul") - .withAggregationType("u") - .withEnableRegionalMdmAccount("krlkhbzhfepg") - .withSourceMdmAccount("qex") - .withSourceMdmNamespace("ocxscpaierhhbcs") - .withAvailabilities(Arrays.asList( - new OperationMetricAvailability().withTimeGrain("majtjaod").withBlobDuration("bnbdxkqpxokajion"), - new OperationMetricAvailability().withTimeGrain("mexgstxgcp").withBlobDuration("gmaajrm"), - new OperationMetricAvailability().withTimeGrain("jwzrl").withBlobDuration("mcl"))) - .withDimensions(Arrays.asList( - new OperationMetricDimension().withName("coejctbzaqs") - .withDisplayName("y") - .withToBeExportedForShoebox(false), - new OperationMetricDimension().withName("fkgukdkexxppof") - .withDisplayName("axcfjpgddtocjjx") - .withToBeExportedForShoebox(true), - new OperationMetricDimension().withName("o") - .withDisplayName("xhdzxibqeojnx") - 
.withToBeExportedForShoebox(true))); - model = BinaryData.fromObject(model).toObject(OperationMetricSpecification.class); - Assertions.assertEquals("urzafb", model.name()); - Assertions.assertEquals("j", model.displayName()); - Assertions.assertEquals("btoqcjmkljavbqid", model.displayDescription()); - Assertions.assertEquals("ajzyul", model.unit()); - Assertions.assertEquals("u", model.aggregationType()); - Assertions.assertEquals("krlkhbzhfepg", model.enableRegionalMdmAccount()); - Assertions.assertEquals("qex", model.sourceMdmAccount()); - Assertions.assertEquals("ocxscpaierhhbcs", model.sourceMdmNamespace()); - Assertions.assertEquals("majtjaod", model.availabilities().get(0).timeGrain()); - Assertions.assertEquals("bnbdxkqpxokajion", model.availabilities().get(0).blobDuration()); - Assertions.assertEquals("coejctbzaqs", model.dimensions().get(0).name()); - Assertions.assertEquals("y", model.dimensions().get(0).displayName()); - Assertions.assertEquals(false, model.dimensions().get(0).toBeExportedForShoebox()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java deleted file mode 100644 index ec62c748a605..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationPropertiesTests.java +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.OperationProperties; -import com.azure.resourcemanager.datafactory.models.OperationLogSpecification; -import com.azure.resourcemanager.datafactory.models.OperationMetricAvailability; -import com.azure.resourcemanager.datafactory.models.OperationMetricDimension; -import com.azure.resourcemanager.datafactory.models.OperationMetricSpecification; -import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationProperties model = BinaryData.fromString( - "{\"serviceSpecification\":{\"logSpecifications\":[{\"name\":\"atscmd\",\"displayName\":\"jhulsuuvmkjo\",\"blobDuration\":\"rwfndiod\"},{\"name\":\"slwejdpvw\",\"displayName\":\"oqpsoa\",\"blobDuration\":\"tazak\"}],\"metricSpecifications\":[{\"name\":\"hbcryffdfdosyge\",\"displayName\":\"aojakhmsbzjhcrz\",\"displayDescription\":\"dphlxaolt\",\"unit\":\"trg\",\"aggregationType\":\"bpf\",\"enableRegionalMdmAccount\":\"s\",\"sourceMdmAccount\":\"zgvfcjrwz\",\"sourceMdmNamespace\":\"xjtfelluwfzit\",\"availabilities\":[{\"timeGrain\":\"qfpjk\",\"blobDuration\":\"xofpdvhpfxxypi\"},{\"timeGrain\":\"nmayhuybb\",\"blobDuration\":\"odepoogin\"},{\"timeGrain\":\"amiheognarxz\",\"blobDuration\":\"heotusiv\"}],\"dimensions\":[{\"name\":\"ciqihnhung\",\"displayName\":\"jzrnf\",\"toBeExportedForShoebox\":true}]},{\"name\":\"ispe\",\"displayName\":\"tzfkufubl\",\"displayDescription\":\"fxqeof\",\"unit\":\"e\",\"aggregationType\":\"hqjbasvmsmj\",\"enableRegionalMdmAccount\":\"lngsntnbybkzgcwr\",\"sourceMdmAccount\":\"lxxwrljdouskc\",\"sourceMdmNamespace\":\"kocrcjdkwtnhx\",\"availabilities\":[{\"timeGrain\":\"iksqr\",\"blobDuration\":\"ssainqpjwnzll\"},{
\"timeGrain\":\"mppeebvmgxs\",\"blobDuration\":\"kyqduujit\"},{\"timeGrain\":\"czdzev\",\"blobDuration\":\"hkr\"},{\"timeGrain\":\"d\",\"blobDuration\":\"p\"}],\"dimensions\":[{\"name\":\"kvwrwjfeu\",\"displayName\":\"hutje\",\"toBeExportedForShoebox\":false},{\"name\":\"ldhugjzzdatqxh\",\"displayName\":\"dgeablgphu\",\"toBeExportedForShoebox\":true},{\"name\":\"dvkaozw\",\"displayName\":\"ftyhxhurokf\",\"toBeExportedForShoebox\":true}]},{\"name\":\"lniwpwcukjfkgiaw\",\"displayName\":\"lryplwckbasyy\",\"displayDescription\":\"ddhsgcbacphe\",\"unit\":\"ot\",\"aggregationType\":\"qgoulznd\",\"enableRegionalMdmAccount\":\"kwy\",\"sourceMdmAccount\":\"gfgibm\",\"sourceMdmNamespace\":\"gakeqsr\",\"availabilities\":[{\"timeGrain\":\"qqedqytbciqfou\",\"blobDuration\":\"mmnkzsmodmgl\"},{\"timeGrain\":\"gpbkwtmut\",\"blobDuration\":\"qktapspwgcuert\"},{\"timeGrain\":\"kdosvqw\",\"blobDuration\":\"mdgbbjfdd\"},{\"timeGrain\":\"bmbexppbhtqqro\",\"blobDuration\":\"p\"}],\"dimensions\":[{\"name\":\"lgbquxig\",\"displayName\":\"jgzjaoyfhrtx\",\"toBeExportedForShoebox\":false}]}]}}") - .toObject(OperationProperties.class); - Assertions.assertEquals("atscmd", model.serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("jhulsuuvmkjo", model.serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("rwfndiod", model.serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("hbcryffdfdosyge", model.serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("aojakhmsbzjhcrz", - model.serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("dphlxaolt", - model.serviceSpecification().metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("trg", model.serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("bpf", 
model.serviceSpecification().metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("s", - model.serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("zgvfcjrwz", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("xjtfelluwfzit", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); - Assertions.assertEquals("qfpjk", - model.serviceSpecification().metricSpecifications().get(0).availabilities().get(0).timeGrain()); - Assertions.assertEquals("xofpdvhpfxxypi", - model.serviceSpecification().metricSpecifications().get(0).availabilities().get(0).blobDuration()); - Assertions.assertEquals("ciqihnhung", - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).name()); - Assertions.assertEquals("jzrnf", - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).displayName()); - Assertions.assertEquals(true, - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).toBeExportedForShoebox()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationProperties model - = new OperationProperties().withServiceSpecification(new OperationServiceSpecification() - .withLogSpecifications(Arrays.asList( - new OperationLogSpecification().withName("atscmd") - .withDisplayName("jhulsuuvmkjo") - .withBlobDuration("rwfndiod"), - new OperationLogSpecification().withName("slwejdpvw") - .withDisplayName("oqpsoa") - .withBlobDuration("tazak"))) - .withMetricSpecifications(Arrays.asList( - new OperationMetricSpecification().withName("hbcryffdfdosyge") - .withDisplayName("aojakhmsbzjhcrz") - .withDisplayDescription("dphlxaolt") - .withUnit("trg") - .withAggregationType("bpf") - .withEnableRegionalMdmAccount("s") - .withSourceMdmAccount("zgvfcjrwz") - .withSourceMdmNamespace("xjtfelluwfzit") - .withAvailabilities(Arrays.asList( 
- new OperationMetricAvailability().withTimeGrain("qfpjk").withBlobDuration("xofpdvhpfxxypi"), - new OperationMetricAvailability().withTimeGrain("nmayhuybb").withBlobDuration("odepoogin"), - new OperationMetricAvailability().withTimeGrain("amiheognarxz") - .withBlobDuration("heotusiv"))) - .withDimensions(Arrays.asList(new OperationMetricDimension().withName("ciqihnhung") - .withDisplayName("jzrnf") - .withToBeExportedForShoebox(true))), - new OperationMetricSpecification().withName("ispe") - .withDisplayName("tzfkufubl") - .withDisplayDescription("fxqeof") - .withUnit("e") - .withAggregationType("hqjbasvmsmj") - .withEnableRegionalMdmAccount("lngsntnbybkzgcwr") - .withSourceMdmAccount("lxxwrljdouskc") - .withSourceMdmNamespace("kocrcjdkwtnhx") - .withAvailabilities(Arrays.asList( - new OperationMetricAvailability().withTimeGrain("iksqr").withBlobDuration("ssainqpjwnzll"), - new OperationMetricAvailability().withTimeGrain("mppeebvmgxs") - .withBlobDuration("kyqduujit"), - new OperationMetricAvailability().withTimeGrain("czdzev").withBlobDuration("hkr"), - new OperationMetricAvailability().withTimeGrain("d").withBlobDuration("p"))) - .withDimensions(Arrays.asList( - new OperationMetricDimension().withName("kvwrwjfeu") - .withDisplayName("hutje") - .withToBeExportedForShoebox(false), - new OperationMetricDimension().withName("ldhugjzzdatqxh") - .withDisplayName("dgeablgphu") - .withToBeExportedForShoebox(true), - new OperationMetricDimension().withName("dvkaozw") - .withDisplayName("ftyhxhurokf") - .withToBeExportedForShoebox(true))), - new OperationMetricSpecification().withName("lniwpwcukjfkgiaw") - .withDisplayName("lryplwckbasyy") - .withDisplayDescription("ddhsgcbacphe") - .withUnit("ot") - .withAggregationType("qgoulznd") - .withEnableRegionalMdmAccount("kwy") - .withSourceMdmAccount("gfgibm") - .withSourceMdmNamespace("gakeqsr") - .withAvailabilities(Arrays.asList( - new OperationMetricAvailability().withTimeGrain("qqedqytbciqfou") - 
.withBlobDuration("mmnkzsmodmgl"), - new OperationMetricAvailability().withTimeGrain("gpbkwtmut") - .withBlobDuration("qktapspwgcuert"), - new OperationMetricAvailability().withTimeGrain("kdosvqw").withBlobDuration("mdgbbjfdd"), - new OperationMetricAvailability().withTimeGrain("bmbexppbhtqqro").withBlobDuration("p"))) - .withDimensions(Arrays.asList(new OperationMetricDimension().withName("lgbquxig") - .withDisplayName("jgzjaoyfhrtx") - .withToBeExportedForShoebox(false)))))); - model = BinaryData.fromObject(model).toObject(OperationProperties.class); - Assertions.assertEquals("atscmd", model.serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("jhulsuuvmkjo", model.serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("rwfndiod", model.serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("hbcryffdfdosyge", model.serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("aojakhmsbzjhcrz", - model.serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("dphlxaolt", - model.serviceSpecification().metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("trg", model.serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("bpf", model.serviceSpecification().metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("s", - model.serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("zgvfcjrwz", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("xjtfelluwfzit", - model.serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); - Assertions.assertEquals("qfpjk", - model.serviceSpecification().metricSpecifications().get(0).availabilities().get(0).timeGrain()); - 
Assertions.assertEquals("xofpdvhpfxxypi", - model.serviceSpecification().metricSpecifications().get(0).availabilities().get(0).blobDuration()); - Assertions.assertEquals("ciqihnhung", - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).name()); - Assertions.assertEquals("jzrnf", - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).displayName()); - Assertions.assertEquals(true, - model.serviceSpecification().metricSpecifications().get(0).dimensions().get(0).toBeExportedForShoebox()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java deleted file mode 100644 index 426ce6ebce17..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationServiceSpecificationTests.java +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OperationLogSpecification; -import com.azure.resourcemanager.datafactory.models.OperationMetricAvailability; -import com.azure.resourcemanager.datafactory.models.OperationMetricDimension; -import com.azure.resourcemanager.datafactory.models.OperationMetricSpecification; -import com.azure.resourcemanager.datafactory.models.OperationServiceSpecification; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationServiceSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationServiceSpecification model = BinaryData.fromString( - "{\"logSpecifications\":[{\"name\":\"ujysvle\",\"displayName\":\"vfqawrlyxwjkcpr\",\"blobDuration\":\"wbxgjvt\"},{\"name\":\"p\",\"displayName\":\"szdnr\",\"blobDuration\":\"qguhmuo\"}],\"metricSpecifications\":[{\"name\":\"rwzwbng\",\"displayName\":\"tnwu\",\"displayDescription\":\"gazxuf\",\"unit\":\"uckyf\",\"aggregationType\":\"rfidfvzwdz\",\"enableRegionalMdmAccount\":\"tymw\",\"sourceMdmAccount\":\"dkfthwxmnt\",\"sourceMdmNamespace\":\"waopvkmijcmmxd\",\"availabilities\":[{\"timeGrain\":\"fsrpymzidnse\",\"blobDuration\":\"xtbzsgfyccsne\"},{\"timeGrain\":\"dwzjeiach\",\"blobDuration\":\"osfln\"},{\"timeGrain\":\"sfqpteehz\",\"blobDuration\":\"ypyqrimzinp\"}],\"dimensions\":[{\"name\":\"dkirsoodqxhcr\",\"displayName\":\"ohjtckw\",\"toBeExportedForShoebox\":true}]}]}") - .toObject(OperationServiceSpecification.class); - Assertions.assertEquals("ujysvle", model.logSpecifications().get(0).name()); - Assertions.assertEquals("vfqawrlyxwjkcpr", model.logSpecifications().get(0).displayName()); - Assertions.assertEquals("wbxgjvt", model.logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("rwzwbng", model.metricSpecifications().get(0).name()); - Assertions.assertEquals("tnwu", 
model.metricSpecifications().get(0).displayName()); - Assertions.assertEquals("gazxuf", model.metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("uckyf", model.metricSpecifications().get(0).unit()); - Assertions.assertEquals("rfidfvzwdz", model.metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("tymw", model.metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("dkfthwxmnt", model.metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("waopvkmijcmmxd", model.metricSpecifications().get(0).sourceMdmNamespace()); - Assertions.assertEquals("fsrpymzidnse", - model.metricSpecifications().get(0).availabilities().get(0).timeGrain()); - Assertions.assertEquals("xtbzsgfyccsne", - model.metricSpecifications().get(0).availabilities().get(0).blobDuration()); - Assertions.assertEquals("dkirsoodqxhcr", model.metricSpecifications().get(0).dimensions().get(0).name()); - Assertions.assertEquals("ohjtckw", model.metricSpecifications().get(0).dimensions().get(0).displayName()); - Assertions.assertEquals(true, model.metricSpecifications().get(0).dimensions().get(0).toBeExportedForShoebox()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationServiceSpecification model = new OperationServiceSpecification() - .withLogSpecifications(Arrays.asList( - new OperationLogSpecification() - .withName("ujysvle") - .withDisplayName("vfqawrlyxwjkcpr") - .withBlobDuration("wbxgjvt"), - new OperationLogSpecification().withName("p").withDisplayName("szdnr").withBlobDuration("qguhmuo"))) - .withMetricSpecifications(Arrays.asList(new OperationMetricSpecification().withName("rwzwbng") - .withDisplayName("tnwu") - .withDisplayDescription("gazxuf") - .withUnit("uckyf") - .withAggregationType("rfidfvzwdz") - .withEnableRegionalMdmAccount("tymw") - .withSourceMdmAccount("dkfthwxmnt") - .withSourceMdmNamespace("waopvkmijcmmxd") - 
.withAvailabilities(Arrays.asList( - new OperationMetricAvailability().withTimeGrain("fsrpymzidnse").withBlobDuration("xtbzsgfyccsne"), - new OperationMetricAvailability().withTimeGrain("dwzjeiach").withBlobDuration("osfln"), - new OperationMetricAvailability().withTimeGrain("sfqpteehz").withBlobDuration("ypyqrimzinp"))) - .withDimensions(Arrays.asList(new OperationMetricDimension().withName("dkirsoodqxhcr") - .withDisplayName("ohjtckw") - .withToBeExportedForShoebox(true))))); - model = BinaryData.fromObject(model).toObject(OperationServiceSpecification.class); - Assertions.assertEquals("ujysvle", model.logSpecifications().get(0).name()); - Assertions.assertEquals("vfqawrlyxwjkcpr", model.logSpecifications().get(0).displayName()); - Assertions.assertEquals("wbxgjvt", model.logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("rwzwbng", model.metricSpecifications().get(0).name()); - Assertions.assertEquals("tnwu", model.metricSpecifications().get(0).displayName()); - Assertions.assertEquals("gazxuf", model.metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("uckyf", model.metricSpecifications().get(0).unit()); - Assertions.assertEquals("rfidfvzwdz", model.metricSpecifications().get(0).aggregationType()); - Assertions.assertEquals("tymw", model.metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("dkfthwxmnt", model.metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("waopvkmijcmmxd", model.metricSpecifications().get(0).sourceMdmNamespace()); - Assertions.assertEquals("fsrpymzidnse", - model.metricSpecifications().get(0).availabilities().get(0).timeGrain()); - Assertions.assertEquals("xtbzsgfyccsne", - model.metricSpecifications().get(0).availabilities().get(0).blobDuration()); - Assertions.assertEquals("dkirsoodqxhcr", model.metricSpecifications().get(0).dimensions().get(0).name()); - Assertions.assertEquals("ohjtckw", 
model.metricSpecifications().get(0).dimensions().get(0).displayName()); - Assertions.assertEquals(true, model.metricSpecifications().get(0).dimensions().get(0).toBeExportedForShoebox()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java deleted file mode 100644 index 6d487d711b0c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OperationsListMockTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.Operation; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OperationsListMockTests { - @Test - public void testList() throws Exception { - String responseStr - = 
"{\"value\":[{\"name\":\"saedglubqtfcupj\",\"origin\":\"te\",\"display\":{\"description\":\"ujiqmksafjhtl\",\"provider\":\"kghtsfppjunk\",\"resource\":\"thkqnyvufvzrqa\",\"operation\":\"epc\"},\"properties\":{\"serviceSpecification\":{\"logSpecifications\":[{\"name\":\"etasiji\",\"displayName\":\"qwomkzcmwqfd\",\"blobDuration\":\"pmvlfm\"},{\"name\":\"umjmpsxz\",\"displayName\":\"af\",\"blobDuration\":\"d\"},{\"name\":\"zporjhubzkzjazf\",\"displayName\":\"wvxq\",\"blobDuration\":\"n\"},{\"name\":\"sdgmoxnelhxhzfyz\",\"displayName\":\"kofcsvipwa\",\"blobDuration\":\"huctprgpmgtjvu\"}],\"metricSpecifications\":[{\"name\":\"fzcsklvtceaoi\",\"displayName\":\"rqlcdhebjfhp\",\"displayDescription\":\"wwesaqsuqpskv\",\"unit\":\"dlra\",\"aggregationType\":\"diathhxqs\",\"enableRegionalMdmAccount\":\"yleyopgy\",\"sourceMdmAccount\":\"rnyf\",\"sourceMdmNamespace\":\"oaomogkpcwffo\",\"availabilities\":[{},{}],\"dimensions\":[{},{}]},{\"name\":\"gjuzgqkx\",\"displayName\":\"avbteaegyojy\",\"displayDescription\":\"epcdhqjczh\",\"unit\":\"tlvujbh\",\"aggregationType\":\"szrhf\",\"enableRegionalMdmAccount\":\"ihkjj\",\"sourceMdmAccount\":\"itmuriizyrgzxpr\",\"sourceMdmNamespace\":\"wmasodsmj\",\"availabilities\":[{},{},{}],\"dimensions\":[{},{}]},{\"name\":\"mhrlgid\",\"displayName\":\"brdhuwcqn\",\"displayDescription\":\"pjd\",\"unit\":\"ienkliyfg\",\"aggregationType\":\"wky\",\"enableRegionalMdmAccount\":\"ndiybdoyyk\",\"sourceMdmAccount\":\"diandktwijoxkkyn\",\"sourceMdmNamespace\":\"qtxpjfo\",\"availabilities\":[{},{},{},{}],\"dimensions\":[{},{}]},{\"name\":\"kqq\",\"displayName\":\"imoinqzva\",\"displayDescription\":\"puggy\",\"unit\":\"chamwofqnttbk\",\"aggregationType\":\"gupxnu\",\"enableRegionalMdmAccount\":\"hguubpmvp\",\"sourceMdmAccount\":\"uiyogvafbdz\",\"sourceMdmNamespace\":\"plolcalyvcxv\",\"availabilities\":[{},{},{}],\"dimensions\":[{}]}]}}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, 
responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.operations().list(com.azure.core.util.Context.NONE); - - Assertions.assertEquals("saedglubqtfcupj", response.iterator().next().name()); - Assertions.assertEquals("te", response.iterator().next().origin()); - Assertions.assertEquals("ujiqmksafjhtl", response.iterator().next().display().description()); - Assertions.assertEquals("kghtsfppjunk", response.iterator().next().display().provider()); - Assertions.assertEquals("thkqnyvufvzrqa", response.iterator().next().display().resource()); - Assertions.assertEquals("epc", response.iterator().next().display().operation()); - Assertions.assertEquals("etasiji", - response.iterator().next().serviceSpecification().logSpecifications().get(0).name()); - Assertions.assertEquals("qwomkzcmwqfd", - response.iterator().next().serviceSpecification().logSpecifications().get(0).displayName()); - Assertions.assertEquals("pmvlfm", - response.iterator().next().serviceSpecification().logSpecifications().get(0).blobDuration()); - Assertions.assertEquals("fzcsklvtceaoi", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).name()); - Assertions.assertEquals("rqlcdhebjfhp", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).displayName()); - Assertions.assertEquals("wwesaqsuqpskv", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).displayDescription()); - Assertions.assertEquals("dlra", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).unit()); - Assertions.assertEquals("diathhxqs", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).aggregationType()); - 
Assertions.assertEquals("yleyopgy", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).enableRegionalMdmAccount()); - Assertions.assertEquals("rnyf", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).sourceMdmAccount()); - Assertions.assertEquals("oaomogkpcwffo", - response.iterator().next().serviceSpecification().metricSpecifications().get(0).sourceMdmNamespace()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java deleted file mode 100644 index 6ae26cc4c286..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageLocationTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OracleCloudStorageLocation; - -public final class OracleCloudStorageLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleCloudStorageLocation model = BinaryData.fromString( - "{\"type\":\"OracleCloudStorageLocation\",\"bucketName\":\"datapwr\",\"version\":\"datafpcfjf\",\"folderPath\":\"datalgzawkgyepeyamnn\",\"fileName\":\"datamdiawpzx\",\"\":{\"azihq\":\"datantmkctdhuosgwqps\",\"ibniynts\":\"datadvqgcnbhcbmjkz\",\"ogjrhskbwgm\":\"datajmfmeftvhkm\"}}") - .toObject(OracleCloudStorageLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleCloudStorageLocation model = new OracleCloudStorageLocation().withFolderPath("datalgzawkgyepeyamnn") - .withFileName("datamdiawpzx") - .withBucketName("datapwr") - .withVersion("datafpcfjf"); - model = BinaryData.fromObject(model).toObject(OracleCloudStorageLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java deleted file mode 100644 index e208ddda0d31..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleCloudStorageReadSettingsTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OracleCloudStorageReadSettings; - -public final class OracleCloudStorageReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleCloudStorageReadSettings model = BinaryData.fromString( - "{\"type\":\"OracleCloudStorageReadSettings\",\"recursive\":\"datazngmwemzvqmkjsvx\",\"wildcardFolderPath\":\"datagccvhnoqlo\",\"wildcardFileName\":\"datapcntqoszrwkkxd\",\"prefix\":\"datauyygdoi\",\"fileListPath\":\"datafcnxrbyvbd\",\"enablePartitionDiscovery\":\"dataztjtzsgenbdq\",\"partitionRootPath\":\"dataf\",\"deleteFilesAfterCompletion\":\"datacngjcdykkpnfvz\",\"modifiedDatetimeStart\":\"datatxgsnusqjjcwggji\",\"modifiedDatetimeEnd\":\"datacbmhsfgaxuvaej\",\"maxConcurrentConnections\":\"datafszxnzjxvdn\",\"disableMetricsCollection\":\"datadlptulwgpdzlfx\",\"\":{\"ynwaofnoiphwaedp\":\"datawgm\"}}") - .toObject(OracleCloudStorageReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleCloudStorageReadSettings model - = new OracleCloudStorageReadSettings().withMaxConcurrentConnections("datafszxnzjxvdn") - .withDisableMetricsCollection("datadlptulwgpdzlfx") - .withRecursive("datazngmwemzvqmkjsvx") - .withWildcardFolderPath("datagccvhnoqlo") - .withWildcardFileName("datapcntqoszrwkkxd") - .withPrefix("datauyygdoi") - .withFileListPath("datafcnxrbyvbd") - .withEnablePartitionDiscovery("dataztjtzsgenbdq") - .withPartitionRootPath("dataf") - .withDeleteFilesAfterCompletion("datacngjcdykkpnfvz") - .withModifiedDatetimeStart("datatxgsnusqjjcwggji") - .withModifiedDatetimeEnd("datacbmhsfgaxuvaej"); - model = BinaryData.fromObject(model).toObject(OracleCloudStorageReadSettings.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java deleted file mode 100644 index f9f08e52fd7b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OraclePartitionSettingsTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OraclePartitionSettings; - -public final class OraclePartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OraclePartitionSettings model = BinaryData.fromString( - "{\"partitionNames\":\"datauknbtxt\",\"partitionColumnName\":\"datautdrrqqajhklttl\",\"partitionUpperBound\":\"datawdrt\",\"partitionLowerBound\":\"datajfty\"}") - .toObject(OraclePartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OraclePartitionSettings model = new OraclePartitionSettings().withPartitionNames("datauknbtxt") - .withPartitionColumnName("datautdrrqqajhklttl") - .withPartitionUpperBound("datawdrt") - .withPartitionLowerBound("datajfty"); - model = BinaryData.fromObject(model).toObject(OraclePartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java 
deleted file mode 100644 index 3eb519951b8b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudObjectDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.OracleServiceCloudObjectDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class OracleServiceCloudObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleServiceCloudObjectDataset model = BinaryData.fromString( - "{\"type\":\"OracleServiceCloudObject\",\"typeProperties\":{\"tableName\":\"datazgfhzdzahktxvcbi\"},\"description\":\"ecthotbkjwh\",\"structure\":\"datapxjvtwk\",\"schema\":\"datadpay\",\"linkedServiceName\":{\"referenceName\":\"opqiwuzrg\",\"parameters\":{\"qqjobsyn\":\"datajfkgb\",\"onjqhdheosx\":\"dataen\",\"wypphtjtntcwgpd\":\"datawfudmpf\",\"spkvrmp\":\"databglaecctokfspvjr\"}},\"parameters\":{\"gmc\":{\"type\":\"Float\",\"defaultValue\":\"datatwjwiyyeo\"},\"ioxbgom\":{\"type\":\"Object\",\"defaultValue\":\"datamvphwfnugslvfz\"}},\"annotations\":[\"dataprpmo\",\"dataxnwcgzuoxixtx\"],\"folder\":{\"name\":\"jsehbknnrnk\"},\"\":{\"zehcz\":\"dataawpcbso\",\"sxvppkjealkdb\":\"datanivco\",\"vbmyzuqfdqdktrj\":\"datawho\",\"szhh\":\"datao\"}}") - 
.toObject(OracleServiceCloudObjectDataset.class); - Assertions.assertEquals("ecthotbkjwh", model.description()); - Assertions.assertEquals("opqiwuzrg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("gmc").type()); - Assertions.assertEquals("jsehbknnrnk", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleServiceCloudObjectDataset model = new OracleServiceCloudObjectDataset().withDescription("ecthotbkjwh") - .withStructure("datapxjvtwk") - .withSchema("datadpay") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("opqiwuzrg") - .withParameters(mapOf("qqjobsyn", "datajfkgb", "onjqhdheosx", "dataen", "wypphtjtntcwgpd", - "datawfudmpf", "spkvrmp", "databglaecctokfspvjr"))) - .withParameters(mapOf("gmc", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatwjwiyyeo"), "ioxbgom", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datamvphwfnugslvfz"))) - .withAnnotations(Arrays.asList("dataprpmo", "dataxnwcgzuoxixtx")) - .withFolder(new DatasetFolder().withName("jsehbknnrnk")) - .withTableName("datazgfhzdzahktxvcbi"); - model = BinaryData.fromObject(model).toObject(OracleServiceCloudObjectDataset.class); - Assertions.assertEquals("ecthotbkjwh", model.description()); - Assertions.assertEquals("opqiwuzrg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("gmc").type()); - Assertions.assertEquals("jsehbknnrnk", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java deleted file mode 100644 index 3ad7d50abfc7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleServiceCloudSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OracleServiceCloudSource; - -public final class OracleServiceCloudSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleServiceCloudSource model = BinaryData.fromString( - "{\"type\":\"OracleServiceCloudSource\",\"query\":\"datarh\",\"queryTimeout\":\"datagaaaxigafa\",\"additionalColumns\":\"datatoo\",\"sourceRetryCount\":\"datazdoblpdtcyv\",\"sourceRetryWait\":\"datahboplavgfbvro\",\"maxConcurrentConnections\":\"datauexqweyslwlppoh\",\"disableMetricsCollection\":\"datafgalexy\",\"\":{\"jxtfvxcjd\":\"datakadtwdcg\",\"jkwltnsnhuvmok\":\"databsgvz\"}}") - .toObject(OracleServiceCloudSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleServiceCloudSource model = new OracleServiceCloudSource().withSourceRetryCount("datazdoblpdtcyv") - .withSourceRetryWait("datahboplavgfbvro") - 
.withMaxConcurrentConnections("datauexqweyslwlppoh") - .withDisableMetricsCollection("datafgalexy") - .withQueryTimeout("datagaaaxigafa") - .withAdditionalColumns("datatoo") - .withQuery("datarh"); - model = BinaryData.fromObject(model).toObject(OracleServiceCloudSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java deleted file mode 100644 index a9d64c7e9fcb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSinkTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OracleSink; - -public final class OracleSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleSink model = BinaryData.fromString( - "{\"type\":\"OracleSink\",\"preCopyScript\":\"datavnruodu\",\"writeBatchSize\":\"datahskh\",\"writeBatchTimeout\":\"datalvo\",\"sinkRetryCount\":\"dataddqxhegcolhqz\",\"sinkRetryWait\":\"datalqrunqwcrk\",\"maxConcurrentConnections\":\"databyxxyfnipy\",\"disableMetricsCollection\":\"datajgfbsfsv\",\"\":{\"ptnwpwskck\":\"dataejypokk\",\"pgvqioq\":\"datawymfy\"}}") - .toObject(OracleSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleSink model = new OracleSink().withWriteBatchSize("datahskh") - .withWriteBatchTimeout("datalvo") - .withSinkRetryCount("dataddqxhegcolhqz") - .withSinkRetryWait("datalqrunqwcrk") - 
.withMaxConcurrentConnections("databyxxyfnipy") - .withDisableMetricsCollection("datajgfbsfsv") - .withPreCopyScript("datavnruodu"); - model = BinaryData.fromObject(model).toObject(OracleSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java deleted file mode 100644 index 020d11c53cd1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleSourceTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OraclePartitionSettings; -import com.azure.resourcemanager.datafactory.models.OracleSource; - -public final class OracleSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleSource model = BinaryData.fromString( - "{\"type\":\"OracleSource\",\"oracleReaderQuery\":\"datad\",\"queryTimeout\":\"datadldm\",\"partitionOption\":\"dataqftywb\",\"partitionSettings\":{\"partitionNames\":\"datazhdciuxotb\",\"partitionColumnName\":\"datalgkkiuq\",\"partitionUpperBound\":\"datapafobpyeobrtaevq\",\"partitionLowerBound\":\"datadhpkiiunyr\"},\"additionalColumns\":\"datackelowsmr\",\"sourceRetryCount\":\"datatqhrtnqssqyn\",\"sourceRetryWait\":\"dataskitok\",\"maxConcurrentConnections\":\"dataamefzzgwjoau\",\"disableMetricsCollection\":\"datamdpndouylf\",\"\":{\"zassrs\":\"dataqinaokxouknzh\"}}") - .toObject(OracleSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
OracleSource model = new OracleSource().withSourceRetryCount("datatqhrtnqssqyn") - .withSourceRetryWait("dataskitok") - .withMaxConcurrentConnections("dataamefzzgwjoau") - .withDisableMetricsCollection("datamdpndouylf") - .withOracleReaderQuery("datad") - .withQueryTimeout("datadldm") - .withPartitionOption("dataqftywb") - .withPartitionSettings(new OraclePartitionSettings().withPartitionNames("datazhdciuxotb") - .withPartitionColumnName("datalgkkiuq") - .withPartitionUpperBound("datapafobpyeobrtaevq") - .withPartitionLowerBound("datadhpkiiunyr")) - .withAdditionalColumns("datackelowsmr"); - model = BinaryData.fromObject(model).toObject(OracleSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java deleted file mode 100644 index f9344cfeb6f9..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.OracleTableDataset; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class OracleTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleTableDataset model = BinaryData.fromString( - "{\"type\":\"OracleTable\",\"typeProperties\":{\"tableName\":\"datagjq\",\"schema\":\"datacant\",\"table\":\"datayxzxjmkanbclazof\"},\"description\":\"xvtemaspm\",\"structure\":\"dataydscdkxwd\",\"schema\":\"datajcbhaahnt\",\"linkedServiceName\":{\"referenceName\":\"felfh\",\"parameters\":{\"kkzdfivsu\":\"dataxo\"}},\"parameters\":{\"t\":{\"type\":\"SecureString\",\"defaultValue\":\"datawzzmr\"},\"kesmkwtzgfragj\":{\"type\":\"Object\",\"defaultValue\":\"datamfppinmg\"},\"qhvmmniiqy\":{\"type\":\"Object\",\"defaultValue\":\"datarxlobkdb\"}},\"annotations\":[\"datahjnskbggicnqw\",\"datact\",\"datawp\",\"datalxkrk\"],\"folder\":{\"name\":\"xrktjcjigcw\"},\"\":{\"evchefp\":\"dataanbqxa\",\"cgimmrim\":\"dataeedyybruholaem\",\"z\":\"dataabsqqlon\"}}") - .toObject(OracleTableDataset.class); - Assertions.assertEquals("xvtemaspm", model.description()); - Assertions.assertEquals("felfh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("t").type()); - Assertions.assertEquals("xrktjcjigcw", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleTableDataset model = new 
OracleTableDataset().withDescription("xvtemaspm") - .withStructure("dataydscdkxwd") - .withSchema("datajcbhaahnt") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("felfh").withParameters(mapOf("kkzdfivsu", "dataxo"))) - .withParameters(mapOf("t", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datawzzmr"), - "kesmkwtzgfragj", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datamfppinmg"), - "qhvmmniiqy", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datarxlobkdb"))) - .withAnnotations(Arrays.asList("datahjnskbggicnqw", "datact", "datawp", "datalxkrk")) - .withFolder(new DatasetFolder().withName("xrktjcjigcw")) - .withTableName("datagjq") - .withSchemaTypePropertiesSchema("datacant") - .withTable("datayxzxjmkanbclazof"); - model = BinaryData.fromObject(model).toObject(OracleTableDataset.class); - Assertions.assertEquals("xvtemaspm", model.description()); - Assertions.assertEquals("felfh", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("t").type()); - Assertions.assertEquals("xrktjcjigcw", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java deleted file mode 100644 index a235e3a831d7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OracleTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.OracleTableDatasetTypeProperties; - -public final class OracleTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OracleTableDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"datawcahdkmbjsmihr\",\"schema\":\"dataezbfsjwf\",\"table\":\"dataglkvbgukbsv\"}") - .toObject(OracleTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OracleTableDatasetTypeProperties model - = new OracleTableDatasetTypeProperties().withTableName("datawcahdkmbjsmihr") - .withSchema("dataezbfsjwf") - .withTable("dataglkvbgukbsv"); - model = BinaryData.fromObject(model).toObject(OracleTableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java deleted file mode 100644 index e18d2484dfc8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcFormatTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OrcFormat; - -public final class OrcFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OrcFormat model = BinaryData.fromString( - "{\"type\":\"OrcFormat\",\"serializer\":\"datanrqa\",\"deserializer\":\"datatnndxolousdvr\",\"\":{\"gyc\":\"dataqmawzjdrpizfu\"}}") - .toObject(OrcFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OrcFormat model = new OrcFormat().withSerializer("datanrqa").withDeserializer("datatnndxolousdvr"); - model = BinaryData.fromObject(model).toObject(OrcFormat.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java deleted file mode 100644 index f953a3a8db25..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSinkTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.OrcSink; -import com.azure.resourcemanager.datafactory.models.OrcWriteSettings; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class OrcSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OrcSink model = BinaryData.fromString( - "{\"type\":\"OrcSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"dataomgse\",\"disableMetricsCollection\":\"datasydhardx\",\"copyBehavior\":\"dataluqfffglf\",\"metadata\":[{\"name\":\"datakierxuvpr\",\"value\":\"dataxewcscuveljfarin\"},{\"name\":\"dataziztgddahymv\",\"value\":\"datajtdhmig\"},{\"name\":\"dataaoexgienylsijqyg\",\"value\":\"datashd\"},{\"name\":\"datahxv\",\"value\":\"datafdsafgkysymhuxs\"}],\"\":{\"wszrbttzsqeyr\":\"datalbpegcetezaa\",\"curfsofshfmgiix\":\"datafhfgoyxxszpai\",\"oefkpuuuxiuwhc\":\"datarveekhsmulvmy\",\"gtmoy\":\"datackekmufip\"}},\"formatSettings\":{\"type\":\"OrcWriteSettings\",\"maxRowsPerFile\":\"datatmaosongtbhvh\",\"fileNamePrefix\":\"datavubwwqgiyu\",\"\":{\"tnsnb\":\"datavwjxmwalhl\",\"vqhodfmortrsnpb\":\"datai\",\"uc\":\"dataungnjkkmkzfb\"}},\"writeBatchSize\":\"datagzjyrdiiwhmrhz\",\"writeBatchTimeout\":\"datavpjydwmaqeytjp\",\"sinkRetryCount\":\"datadp\",\"sinkRetryWait\":\"datapdcsvzugiurhgqlv\",\"maxConcurrentConnections\":\"datajzscrjtnq\",\"disableMetricsCollection\":\"datapobjufksddxk\",\"\":{\"oylfclsrguecbth\":\"dataxlylxfp\",\"lzguvckpdp\":\"datauivgbimededqgyrv\",\"kikqdqiyb\":\"datacnrjq\",\"npisbpxlse\":\"datatlvofjjsetiznnna\"}}") - .toObject(OrcSink.class); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OrcSink model = new OrcSink().withWriteBatchSize("datagzjyrdiiwhmrhz") - .withWriteBatchTimeout("datavpjydwmaqeytjp") - .withSinkRetryCount("datadp") - .withSinkRetryWait("datapdcsvzugiurhgqlv") - .withMaxConcurrentConnections("datajzscrjtnq") - .withDisableMetricsCollection("datapobjufksddxk") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("dataomgse") - .withDisableMetricsCollection("datasydhardx") - .withCopyBehavior("dataluqfffglf") - .withMetadata( - Arrays.asList(new MetadataItem().withName("datakierxuvpr").withValue("dataxewcscuveljfarin"), - new MetadataItem().withName("dataziztgddahymv").withValue("datajtdhmig"), - new MetadataItem().withName("dataaoexgienylsijqyg").withValue("datashd"), - new MetadataItem().withName("datahxv").withValue("datafdsafgkysymhuxs"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings( - new OrcWriteSettings().withMaxRowsPerFile("datatmaosongtbhvh").withFileNamePrefix("datavubwwqgiyu")); - model = BinaryData.fromObject(model).toObject(OrcSink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java deleted file mode 100644 index 0c191becf201..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcSourceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OrcSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class OrcSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OrcSource model = BinaryData.fromString( - "{\"type\":\"OrcSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datanqjbav\",\"disableMetricsCollection\":\"datalfefbbv\",\"\":{\"bndr\":\"datannpafufwre\",\"temvaajyit\":\"datazfnstlavmdc\",\"ubryhvbvjyf\":\"datayzgwihkswurza\",\"gwk\":\"datawaupjozgryo\"}},\"additionalColumns\":\"datailyznbb\",\"sourceRetryCount\":\"datamxznfoa\",\"sourceRetryWait\":\"datajwiswzn\",\"maxConcurrentConnections\":\"databhtleiwfiz\",\"disableMetricsCollection\":\"datahvq\",\"\":{\"sazgnyf\":\"dataltn\",\"eninaf\":\"dataqyliagnbhz\"}}") - .toObject(OrcSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OrcSource model = new OrcSource().withSourceRetryCount("datamxznfoa") - .withSourceRetryWait("datajwiswzn") - .withMaxConcurrentConnections("databhtleiwfiz") - .withDisableMetricsCollection("datahvq") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datanqjbav") - .withDisableMetricsCollection("datalfefbbv") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withAdditionalColumns("datailyznbb"); - model = BinaryData.fromObject(model).toObject(OrcSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java deleted file mode 100644 index a54d0c7d30b6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/OrcWriteSettingsTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.OrcWriteSettings; - -public final class OrcWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OrcWriteSettings model = BinaryData.fromString( - "{\"type\":\"OrcWriteSettings\",\"maxRowsPerFile\":\"datavrzdjgmg\",\"fileNamePrefix\":\"datayvbsiaenv\",\"\":{\"vcuwfybxorr\":\"databzpizgau\",\"sqarbtrk\":\"dataeo\",\"efijpjiudnust\":\"datan\"}}") - .toObject(OrcWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OrcWriteSettings model - = new OrcWriteSettings().withMaxRowsPerFile("datavrzdjgmg").withFileNamePrefix("datayvbsiaenv"); - model = BinaryData.fromObject(model).toObject(OrcWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java deleted file mode 100644 index d4058ab38330..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PackageStoreTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.EntityReference; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeEntityReferenceType; -import com.azure.resourcemanager.datafactory.models.PackageStore; -import org.junit.jupiter.api.Assertions; - -public final class PackageStoreTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PackageStore model = BinaryData.fromString( - "{\"name\":\"npb\",\"packageStoreLinkedService\":{\"type\":\"LinkedServiceReference\",\"referenceName\":\"uobclobnaqeizpli\"}}") - .toObject(PackageStore.class); - Assertions.assertEquals("npb", model.name()); - Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.packageStoreLinkedService().type()); - Assertions.assertEquals("uobclobnaqeizpli", model.packageStoreLinkedService().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PackageStore model = new PackageStore().withName("npb") - .withPackageStoreLinkedService( - new EntityReference().withType(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE) - .withReferenceName("uobclobnaqeizpli")); - model = BinaryData.fromObject(model).toObject(PackageStore.class); - Assertions.assertEquals("npb", model.name()); - 
Assertions.assertEquals(IntegrationRuntimeEntityReferenceType.LINKED_SERVICE_REFERENCE, - model.packageStoreLinkedService().type()); - Assertions.assertEquals("uobclobnaqeizpli", model.packageStoreLinkedService().referenceName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java deleted file mode 100644 index 50dfe668900f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParameterSpecificationTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import org.junit.jupiter.api.Assertions; - -public final class ParameterSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParameterSpecification model = BinaryData.fromString("{\"type\":\"Int\",\"defaultValue\":\"datasmond\"}") - .toObject(ParameterSpecification.class); - Assertions.assertEquals(ParameterType.INT, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParameterSpecification model - = new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datasmond"); - model = BinaryData.fromObject(model).toObject(ParameterSpecification.class); - Assertions.assertEquals(ParameterType.INT, model.type()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java deleted file mode 100644 index e1804a8d47da..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetFormatTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ParquetFormat; - -public final class ParquetFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParquetFormat model = BinaryData.fromString( - "{\"type\":\"ParquetFormat\",\"serializer\":\"datadbtqgkujd\",\"deserializer\":\"dataoxrqw\",\"\":{\"ya\":\"datarbtigap\",\"pdfhtwmmkf\":\"dataikeej\"}}") - .toObject(ParquetFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParquetFormat model = new ParquetFormat().withSerializer("datadbtqgkujd").withDeserializer("dataoxrqw"); - model = BinaryData.fromObject(model).toObject(ParquetFormat.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java deleted file mode 100644 index cc9f2cd8284d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetReadSettingsTests.java +++ /dev/null @@ -1,39 +0,0 
@@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.ParquetReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class ParquetReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParquetReadSettings model = BinaryData.fromString( - "{\"type\":\"ParquetReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"tlpsisx\":\"datasvjtgpyo\",\"owvvxjqrusk\":\"datammxjdkvyzilrlf\"}},\"\":{\"nsqeewfuw\":\"datayfe\",\"azi\":\"datamkca\",\"wmebmx\":\"datahwy\",\"txkurp\":\"datawcf\"}}") - .toObject(ParquetReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParquetReadSettings model = new ParquetReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))); - model = BinaryData.fromObject(model).toObject(ParquetReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java deleted file mode 100644 index 424e5971e1f5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSinkTests.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.ParquetSink; -import com.azure.resourcemanager.datafactory.models.ParquetWriteSettings; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class ParquetSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParquetSink model = BinaryData.fromString( - 
"{\"type\":\"ParquetSink\",\"storeSettings\":{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"datapzdpujywjmo\",\"disableMetricsCollection\":\"datazzkplqmcam\",\"copyBehavior\":\"dataia\",\"metadata\":[{\"name\":\"datangtjgfp\",\"value\":\"datawugfwp\"},{\"name\":\"datacewbqaibkyeysf\",\"value\":\"datahdydyybztlylh\"},{\"name\":\"datacjq\",\"value\":\"datacie\"},{\"name\":\"datak\",\"value\":\"dataxf\"}],\"\":{\"zieyx\":\"datavecjhbttmhneqd\",\"akkn\":\"dataidabq\",\"fcryqrrs\":\"dataacseqonlj\",\"eydmeuimlhyze\":\"dataqtd\"}},\"formatSettings\":{\"type\":\"ParquetWriteSettings\",\"maxRowsPerFile\":\"datavkzrvy\",\"fileNamePrefix\":\"datalqgyu\",\"\":{\"prrqcaglytxj\":\"dataelyjduzapnopoto\",\"rcehfwcecleaqa\":\"databdpczmzu\",\"fccnwmdpb\":\"dataiqskylwpqrc\"}},\"writeBatchSize\":\"datatknhfxpve\",\"writeBatchTimeout\":\"datauckrzwraqaptqy\",\"sinkRetryCount\":\"datalyu\",\"sinkRetryWait\":\"datapwwnqoub\",\"maxConcurrentConnections\":\"datassvfzjj\",\"disableMetricsCollection\":\"datacxeosylgjpp\",\"\":{\"mckzeaia\":\"datajbqrfwnpwvpnbgyx\",\"qpzqivfgemvuicxw\":\"datacauvlfsct\",\"jatj\":\"datayrydlvfnucgwfl\",\"dyfjsskvzv\":\"datandbrcdumkqhatcko\"}}") - .toObject(ParquetSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParquetSink model = new ParquetSink().withWriteBatchSize("datatknhfxpve") - .withWriteBatchTimeout("datauckrzwraqaptqy") - .withSinkRetryCount("datalyu") - .withSinkRetryWait("datapwwnqoub") - .withMaxConcurrentConnections("datassvfzjj") - .withDisableMetricsCollection("datacxeosylgjpp") - .withStoreSettings(new StoreWriteSettings().withMaxConcurrentConnections("datapzdpujywjmo") - .withDisableMetricsCollection("datazzkplqmcam") - .withCopyBehavior("dataia") - .withMetadata(Arrays.asList(new MetadataItem().withName("datangtjgfp").withValue("datawugfwp"), - new MetadataItem().withName("datacewbqaibkyeysf").withValue("datahdydyybztlylh"), - new 
MetadataItem().withName("datacjq").withValue("datacie"), - new MetadataItem().withName("datak").withValue("dataxf"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings"))) - .withFormatSettings( - new ParquetWriteSettings().withMaxRowsPerFile("datavkzrvy").withFileNamePrefix("datalqgyu")); - model = BinaryData.fromObject(model).toObject(ParquetSink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java deleted file mode 100644 index 5df531609d78..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetSourceTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.ParquetReadSettings; -import com.azure.resourcemanager.datafactory.models.ParquetSource; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class ParquetSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParquetSource model = BinaryData.fromString( - "{\"type\":\"ParquetSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datakwsaxpbieehpvqf\",\"disableMetricsCollection\":\"datarrjprygjgyovc\",\"\":{\"bkkcxcmjkronxm\":\"dataiismaggktcoyk\",\"utyszhzlvkmirn\":\"datarhwwdfnc\",\"mepbmogtfpksk\":\"datadbza\",\"rlyynkgnychu\":\"datasyoh\"}},\"formatSettings\":{\"type\":\"ParquetReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"oewfg\":\"datatbhjgliioeodgn\",\"ixtyavvexj\":\"datawmm\"}},\"\":{\"vnoqtwqciq\":\"datakonbgegwxjgkrpp\"}},\"additionalColumns\":\"datahkdkv\",\"sourceRetryCount\":\"dataspbdscotidik\",\"sourceRetryWait\":\"datatmjkfmrjngrjsqt\",\"maxConcurrentConnections\":\"datahabhhpcvsdy\",\"disableMetricsCollection\":\"databydrgxvnmt\",\"\":{\"mudwruog\":\"dataxr\",\"llkfjgj\":\"datathfqcyycx\",\"uscm\":\"datawfssgiebq\"}}") - .toObject(ParquetSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParquetSource model = new ParquetSource().withSourceRetryCount("dataspbdscotidik") - .withSourceRetryWait("datatmjkfmrjngrjsqt") - .withMaxConcurrentConnections("datahabhhpcvsdy") - .withDisableMetricsCollection("databydrgxvnmt") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datakwsaxpbieehpvqf") - .withDisableMetricsCollection("datarrjprygjgyovc") - 
.withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new ParquetReadSettings().withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings")))) - .withAdditionalColumns("datahkdkv"); - model = BinaryData.fromObject(model).toObject(ParquetSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java deleted file mode 100644 index 7258d0b32265..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ParquetWriteSettingsTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ParquetWriteSettings; - -public final class ParquetWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ParquetWriteSettings model = BinaryData.fromString( - "{\"type\":\"ParquetWriteSettings\",\"maxRowsPerFile\":\"dataoxohgkdnhb\",\"fileNamePrefix\":\"dataepuv\",\"\":{\"sq\":\"datazqzjvfrhyxlwqyo\",\"lgmemkyou\":\"dataernbj\"}}") - .toObject(ParquetWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ParquetWriteSettings model - = new ParquetWriteSettings().withMaxRowsPerFile("dataoxohgkdnhb").withFileNamePrefix("dataepuv"); - model = BinaryData.fromObject(model).toObject(ParquetWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java deleted file mode 100644 index 35089a9e5458..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalObjectDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PaypalObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PaypalObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PaypalObjectDataset model = BinaryData.fromString( - "{\"type\":\"PaypalObject\",\"typeProperties\":{\"tableName\":\"datau\"},\"description\":\"ajvvq\",\"structure\":\"datao\",\"schema\":\"dataon\",\"linkedServiceName\":{\"referenceName\":\"livgti\",\"parameters\":{\"mrvkxeojtdyulglh\":\"dataqjqjcajgofytkhhk\",\"xspxgogypbztgae\":\"datalwruklfq\"}},\"parameters\":{\"lvry\":{\"type\":\"Bool\",\"defaultValue\":\"datavctvuzimbwttm\"},\"ajnpdwzjgg\":{\"type\":\"Float\",\"defaultValue\":\"datarnwuk\"},\"ayfiq\":{\"type\":\"Int\",\"defaultValue\":\"dataepemz\"}},\"annotations\":[\"dataxcor\",\"datavudyhgtrttcuayi\"],\"folder\":{\"name\":\"nkmm\"},\"\":{\"qgqexowqzrtgqr\":\"dataf\",\"obothx\":\"datakkvfygkuobpwainp\",\"qgzyvextc\":\"dataewhpnyjt\",\"whdlrifioz\":\"dataslroldow\"}}") - .toObject(PaypalObjectDataset.class); - Assertions.assertEquals("ajvvq", model.description()); - Assertions.assertEquals("livgti", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("lvry").type()); - Assertions.assertEquals("nkmm", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PaypalObjectDataset model = new PaypalObjectDataset().withDescription("ajvvq") - 
.withStructure("datao") - .withSchema("dataon") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("livgti") - .withParameters(mapOf("mrvkxeojtdyulglh", "dataqjqjcajgofytkhhk", "xspxgogypbztgae", "datalwruklfq"))) - .withParameters(mapOf("lvry", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datavctvuzimbwttm"), - "ajnpdwzjgg", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datarnwuk"), - "ayfiq", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataepemz"))) - .withAnnotations(Arrays.asList("dataxcor", "datavudyhgtrttcuayi")) - .withFolder(new DatasetFolder().withName("nkmm")) - .withTableName("datau"); - model = BinaryData.fromObject(model).toObject(PaypalObjectDataset.class); - Assertions.assertEquals("ajvvq", model.description()); - Assertions.assertEquals("livgti", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("lvry").type()); - Assertions.assertEquals("nkmm", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java deleted file mode 100644 index 910276a1f714..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PaypalSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PaypalSource; - -public final class PaypalSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PaypalSource model = BinaryData.fromString( - "{\"type\":\"PaypalSource\",\"query\":\"datajreo\",\"queryTimeout\":\"dataeoaboozxkdzm\",\"additionalColumns\":\"datamnyufdemrcl\",\"sourceRetryCount\":\"datagpkye\",\"sourceRetryWait\":\"datatkhihixisd\",\"maxConcurrentConnections\":\"dataflkeqgxljsbto\",\"disableMetricsCollection\":\"datawcveqge\",\"\":{\"zlfhhwdajfth\":\"datalxocz\",\"nufqh\":\"dataswuomjd\"}}") - .toObject(PaypalSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PaypalSource model = new PaypalSource().withSourceRetryCount("datagpkye") - .withSourceRetryWait("datatkhihixisd") - .withMaxConcurrentConnections("dataflkeqgxljsbto") - .withDisableMetricsCollection("datawcveqge") - .withQueryTimeout("dataeoaboozxkdzm") - .withAdditionalColumns("datamnyufdemrcl") - .withQuery("datajreo"); - model = BinaryData.fromObject(model).toObject(PaypalSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java deleted file mode 100644 index cd75f0464c69..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PhoenixDatasetTypeProperties; - -public final class PhoenixDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PhoenixDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datahwbdpsesboynpy\",\"table\":\"dataorrvk\",\"schema\":\"datafctaneti\"}") - .toObject(PhoenixDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PhoenixDatasetTypeProperties model = new PhoenixDatasetTypeProperties().withTableName("datahwbdpsesboynpy") - .withTable("dataorrvk") - .withSchema("datafctaneti"); - model = BinaryData.fromObject(model).toObject(PhoenixDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java deleted file mode 100644 index 322e54363af3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixObjectDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PhoenixObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PhoenixObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PhoenixObjectDataset model = BinaryData.fromString( - "{\"type\":\"PhoenixObject\",\"typeProperties\":{\"tableName\":\"databiichgk\",\"table\":\"datasozodwjcfq\",\"schema\":\"dataxryqycymzr\"},\"description\":\"fgdwzauz\",\"structure\":\"datah\",\"schema\":\"datadnyciwzk\",\"linkedServiceName\":{\"referenceName\":\"lykqadfge\",\"parameters\":{\"bri\":\"dataoha\",\"ljqovqmxqsxo\":\"datamadjrsbga\",\"tsxooiobh\":\"dataxqnkiuokg\"}},\"parameters\":{\"srnjlvgrghnh\":{\"type\":\"Array\",\"defaultValue\":\"dataptlsrvqzgaqs\"}},\"annotations\":[\"datarqhjnin\",\"dataeswvnhqkgebzqz\"],\"folder\":{\"name\":\"viujojzdvms\"},\"\":{\"tqhdbitqsb\":\"datasxsxoxvimdv\",\"d\":\"dataujsgomrihumgrm\",\"gfygfkgxbdpb\":\"datavq\"}}") - .toObject(PhoenixObjectDataset.class); - Assertions.assertEquals("fgdwzauz", model.description()); - Assertions.assertEquals("lykqadfge", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("srnjlvgrghnh").type()); - Assertions.assertEquals("viujojzdvms", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PhoenixObjectDataset model = new PhoenixObjectDataset().withDescription("fgdwzauz") - .withStructure("datah") - 
.withSchema("datadnyciwzk") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("lykqadfge") - .withParameters(mapOf("bri", "dataoha", "ljqovqmxqsxo", "datamadjrsbga", "tsxooiobh", "dataxqnkiuokg"))) - .withParameters(mapOf("srnjlvgrghnh", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataptlsrvqzgaqs"))) - .withAnnotations(Arrays.asList("datarqhjnin", "dataeswvnhqkgebzqz")) - .withFolder(new DatasetFolder().withName("viujojzdvms")) - .withTableName("databiichgk") - .withTable("datasozodwjcfq") - .withSchemaTypePropertiesSchema("dataxryqycymzr"); - model = BinaryData.fromObject(model).toObject(PhoenixObjectDataset.class); - Assertions.assertEquals("fgdwzauz", model.description()); - Assertions.assertEquals("lykqadfge", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("srnjlvgrghnh").type()); - Assertions.assertEquals("viujojzdvms", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java deleted file mode 100644 index 64261d15a9b2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PhoenixSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PhoenixSource; - -public final class PhoenixSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PhoenixSource model = BinaryData.fromString( - "{\"type\":\"PhoenixSource\",\"query\":\"dataniob\",\"queryTimeout\":\"dataxxbneiobub\",\"additionalColumns\":\"datayemppwkryz\",\"sourceRetryCount\":\"dataqpk\",\"sourceRetryWait\":\"datauv\",\"maxConcurrentConnections\":\"datai\",\"disableMetricsCollection\":\"datazbhmyhjg\",\"\":{\"fi\":\"dataernckggwiquk\",\"fttmjomuwl\":\"datakeolzizfbunzmx\",\"fzgpvdlx\":\"datavjwkpznsfbit\",\"clcuxzl\":\"datayo\"}}") - .toObject(PhoenixSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PhoenixSource model = new PhoenixSource().withSourceRetryCount("dataqpk") - .withSourceRetryWait("datauv") - .withMaxConcurrentConnections("datai") - .withDisableMetricsCollection("datazbhmyhjg") - .withQueryTimeout("dataxxbneiobub") - .withAdditionalColumns("datayemppwkryz") - .withQuery("dataniob"); - model = BinaryData.fromObject(model).toObject(PhoenixSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java deleted file mode 100644 index f1f7a8b1ec26..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineElapsedTimeMetricPolicyTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; - -public final class PipelineElapsedTimeMetricPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineElapsedTimeMetricPolicy model = BinaryData.fromString("{\"duration\":\"datahwagohbuffkmrqe\"}") - .toObject(PipelineElapsedTimeMetricPolicy.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineElapsedTimeMetricPolicy model - = new PipelineElapsedTimeMetricPolicy().withDuration("datahwagohbuffkmrqe"); - model = BinaryData.fromObject(model).toObject(PipelineElapsedTimeMetricPolicy.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java deleted file mode 100644 index 688c9ce32d2c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineExternalComputeScalePropertiesTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineExternalComputeScaleProperties; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PipelineExternalComputeScalePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineExternalComputeScaleProperties model = BinaryData.fromString( - "{\"timeToLive\":429210484,\"numberOfPipelineNodes\":1775001304,\"numberOfExternalNodes\":1618538006,\"\":{\"ksznf\":\"databxsnx\"}}") - .toObject(PipelineExternalComputeScaleProperties.class); - Assertions.assertEquals(429210484, model.timeToLive()); - Assertions.assertEquals(1775001304, model.numberOfPipelineNodes()); - Assertions.assertEquals(1618538006, model.numberOfExternalNodes()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineExternalComputeScaleProperties model - = new PipelineExternalComputeScaleProperties().withTimeToLive(429210484) - .withNumberOfPipelineNodes(1775001304) - .withNumberOfExternalNodes(1618538006) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(PipelineExternalComputeScaleProperties.class); - Assertions.assertEquals(429210484, model.timeToLive()); - Assertions.assertEquals(1775001304, model.numberOfPipelineNodes()); - Assertions.assertEquals(1618538006, model.numberOfExternalNodes()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java deleted file mode 100644 index 9e9845015545..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineFolderTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineFolder; -import org.junit.jupiter.api.Assertions; - -public final class PipelineFolderTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineFolder model = BinaryData.fromString("{\"name\":\"rnntiewdjcv\"}").toObject(PipelineFolder.class); - Assertions.assertEquals("rnntiewdjcv", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineFolder model = new PipelineFolder().withName("rnntiewdjcv"); - model = BinaryData.fromObject(model).toObject(PipelineFolder.class); - Assertions.assertEquals("rnntiewdjcv", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java deleted file mode 100644 
index 961939ae0497..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineListResponseTests.java +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PipelineResourceInner; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; -import com.azure.resourcemanager.datafactory.models.PipelineFolder; -import com.azure.resourcemanager.datafactory.models.PipelineListResponse; -import com.azure.resourcemanager.datafactory.models.PipelinePolicy; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PipelineListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineListResponse model = BinaryData.fromString( - 
"{\"value\":[{\"properties\":{\"description\":\"xrbuukzclew\",\"activities\":[{\"type\":\"Activity\",\"name\":\"lw\",\"description\":\"ztzp\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"yfzqwhxxbu\",\"dependencyConditions\":[]},{\"activity\":\"qa\",\"dependencyConditions\":[]},{\"activity\":\"zfeqztppri\",\"dependencyConditions\":[]},{\"activity\":\"lxorjaltolmncws\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"wcsdbnwdcfhucq\",\"value\":\"datapfuvglsbjjca\"},{\"name\":\"vxb\",\"value\":\"datat\"},{\"name\":\"udutnco\",\"value\":\"datamr\"},{\"name\":\"xqtvcofu\",\"value\":\"dataf\"}],\"\":{\"bgdknnqv\":\"datagj\",\"sgsahmkycgr\":\"dataaznqntoru\",\"s\":\"datauwjuetaeburuvdmo\",\"tpuqujmq\":\"datazlxwabmqoefkifr\"}},{\"type\":\"Activity\",\"name\":\"gkfbtndoaong\",\"description\":\"cn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ed\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"waezkojvd\",\"value\":\"datapzfoqoui\"}],\"\":{\"p\":\"dataxarzgszufoxciq\",\"xkhnzbonlwnto\":\"datadoamciodhkha\",\"zcmrvexztvb\":\"datagokdwbwhks\",\"lmnguxaw\":\"dataqgsfraoyzkoow\"}}],\"parameters\":{\"bykutw\":{\"type\":\"Int\",\"defaultValue\":\"datayuuximerqfobwyzn\"},\"sd\":{\"type\":\"Float\",\"defaultValue\":\"datapagmhrskdsnf\"},\"zev\":{\"type\":\"String\",\"defaultValue\":\"datagtdlmk\"},\"ejdcngqqmoakuf\":{\"type\":\"String\",\"defaultValue\":\"dataewpusdsttwvogvb\"}},\"variables\":{\"grtwae\":{\"type\":\"Array\",\"defaultValue\":\"datawr\"},\"inrfdwoyu\":{\"type\":\"String\",\"defaultValue\":\"datazkopb\"},\"mzqhoftrmaequi\":{\"type\":\"Bool\",\"defaultValue\":\"dataiuiefozbhdmsm\"}},\"concurrency\":1964875083,\"annotations\":[\"dataslfaoqzpiyyl\",\"dataalnswhccsphk\",\"dataivwitqscywugg\",\"dataoluhczbwemh\"],\"runDimensions\":{\"wmsweypqwd\":\"datasbrgz\",\"mkttlstvlzywem\":\"dataggicccnxqhue\",\"lusiy\":\"datazrncsdt\"},\"folder\":{\"name\":\"fgytguslfeadcyg\
"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datahejhzisx\"}}},\"name\":\"pelol\",\"type\":\"vk\",\"etag\":\"pqvujzraehtwdwrf\",\"\":{\"cdl\":\"dataiby\"},\"id\":\"shfwpracstwity\"}],\"nextLink\":\"evxccedcp\"}") - .toObject(PipelineListResponse.class); - Assertions.assertEquals("shfwpracstwity", model.value().get(0).id()); - Assertions.assertEquals("xrbuukzclew", model.value().get(0).description()); - Assertions.assertEquals("lw", model.value().get(0).activities().get(0).name()); - Assertions.assertEquals("ztzp", model.value().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.value().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, - model.value().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("yfzqwhxxbu", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("wcsdbnwdcfhucq", - model.value().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.INT, model.value().get(0).parameters().get("bykutw").type()); - Assertions.assertEquals(VariableType.ARRAY, model.value().get(0).variables().get("grtwae").type()); - Assertions.assertEquals(1964875083, model.value().get(0).concurrency()); - Assertions.assertEquals("fgytguslfeadcyg", model.value().get(0).folder().name()); - Assertions.assertEquals("evxccedcp", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineListResponse model = new PipelineListResponse() - .withValue(Arrays.asList(new PipelineResourceInner().withId("shfwpracstwity") - .withDescription("xrbuukzclew") - .withActivities(Arrays.asList( - new Activity().withName("lw") - .withDescription("ztzp") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("yfzqwhxxbu") - 
.withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qa") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zfeqztppri") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lxorjaltolmncws") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("wcsdbnwdcfhucq").withValue("datapfuvglsbjjca"), - new UserProperty().withName("vxb").withValue("datat"), - new UserProperty().withName("udutnco").withValue("datamr"), - new UserProperty().withName("xqtvcofu").withValue("dataf"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("gkfbtndoaong") - .withDescription("cn") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ed") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("waezkojvd").withValue("datapzfoqoui"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("bykutw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datayuuximerqfobwyzn"), - "sd", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datapagmhrskdsnf"), - "zev", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datagtdlmk"), - "ejdcngqqmoakuf", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("dataewpusdsttwvogvb"))) - .withVariables(mapOf("grtwae", - new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datawr"), "inrfdwoyu", - new 
VariableSpecification().withType(VariableType.STRING).withDefaultValue("datazkopb"), - "mzqhoftrmaequi", - new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("dataiuiefozbhdmsm"))) - .withConcurrency(1964875083) - .withAnnotations( - Arrays.asList("dataslfaoqzpiyyl", "dataalnswhccsphk", "dataivwitqscywugg", "dataoluhczbwemh")) - .withRunDimensions( - mapOf("wmsweypqwd", "datasbrgz", "mkttlstvlzywem", "dataggicccnxqhue", "lusiy", "datazrncsdt")) - .withFolder(new PipelineFolder().withName("fgytguslfeadcyg")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datahejhzisx"))) - .withAdditionalProperties(mapOf("name", "pelol", "etag", "pqvujzraehtwdwrf", "type", "vk")))) - .withNextLink("evxccedcp"); - model = BinaryData.fromObject(model).toObject(PipelineListResponse.class); - Assertions.assertEquals("shfwpracstwity", model.value().get(0).id()); - Assertions.assertEquals("xrbuukzclew", model.value().get(0).description()); - Assertions.assertEquals("lw", model.value().get(0).activities().get(0).name()); - Assertions.assertEquals("ztzp", model.value().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.value().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, - model.value().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("yfzqwhxxbu", model.value().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("wcsdbnwdcfhucq", - model.value().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.INT, model.value().get(0).parameters().get("bykutw").type()); - Assertions.assertEquals(VariableType.ARRAY, model.value().get(0).variables().get("grtwae").type()); - Assertions.assertEquals(1964875083, model.value().get(0).concurrency()); - Assertions.assertEquals("fgytguslfeadcyg", 
model.value().get(0).folder().name()); - Assertions.assertEquals("evxccedcp", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java deleted file mode 100644 index 160be086b9c6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinePolicyTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; -import com.azure.resourcemanager.datafactory.models.PipelinePolicy; - -public final class PipelinePolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelinePolicy model - = BinaryData.fromString("{\"elapsedTimeMetric\":{\"duration\":\"datawr\"}}").toObject(PipelinePolicy.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelinePolicy model - = new PipelinePolicy().withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datawr")); - model = BinaryData.fromObject(model).toObject(PipelinePolicy.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java deleted file mode 100644 index ac0be6aec1af..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineReferenceTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import org.junit.jupiter.api.Assertions; - -public final class PipelineReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineReference model = BinaryData.fromString("{\"referenceName\":\"d\",\"name\":\"rxwhydtlu\"}") - .toObject(PipelineReference.class); - Assertions.assertEquals("d", model.referenceName()); - Assertions.assertEquals("rxwhydtlu", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineReference model = new PipelineReference().withReferenceName("d").withName("rxwhydtlu"); - model = BinaryData.fromObject(model).toObject(PipelineReference.class); - Assertions.assertEquals("d", model.referenceName()); - Assertions.assertEquals("rxwhydtlu", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java deleted file mode 100644 index 707f33af4f8f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineResourceInnerTests.java +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PipelineResourceInner; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; -import com.azure.resourcemanager.datafactory.models.PipelineFolder; -import com.azure.resourcemanager.datafactory.models.PipelinePolicy; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PipelineResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineResourceInner model = BinaryData.fromString( - 
"{\"properties\":{\"description\":\"dyodnwzxltj\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hlt\",\"description\":\"gcxn\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"byqunyow\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"acizsjqlhkrr\":\"datarkvfgbvfvpdbo\",\"hvxndzwmkrefajpj\":\"databdeibqipqk\",\"yhgbijtjivfx\":\"datarwkq\",\"stawfsdjpvkv\":\"datasjabibs\"}},{\"activity\":\"bjxbkzbzk\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Completed\"],\"\":{\"jjklff\":\"dataudurgkakmokz\",\"bizikayuhq\":\"datamouwqlgzrfzeey\",\"wrv\":\"databjbsybb\"}},{\"activity\":\"ldgmfpgvmpip\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"dsrezpdrhneuyow\":\"dataqfxssmwutw\",\"t\":\"datakdw\"}},{\"activity\":\"sibircgpi\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Skipped\",\"Failed\"],\"\":{\"nokixrjqcirgz\":\"dataanlfzxiavrmbz\"}}],\"userProperties\":[{\"name\":\"lazszrn\",\"value\":\"dataoiindfpwpjy\"},{\"name\":\"wbtlhflsjcdh\",\"value\":\"datazfjvfbgofe\"},{\"name\":\"jagrqmqhldvr\",\"value\":\"dataiiojnal\"}],\"\":{\"ueluqhhahhxvrhmz\":\"datakvtvsexso\"}},{\"type\":\"Activity\",\"name\":\"wpjgwws\",\"description\":\"ghftqsxhqxujxuk\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jguufzdm\",\"dependencyConditions\":[\"Completed\",\"Succeeded\"],\"\":{\"phoszqz\":\"datahwhbotzingamv\",\"kfwynw\":\"datadphqamv\",\"tnvyqiatkzwp\":\"datavtbvkayh\",\"vvsccyajguq\":\"datanpwzcjaes\"}},{\"activity\":\"hwyg\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"uh\":\"datafxusemdwzr\"}},{\"activity\":\"pfcqdp\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Skipped\",\"Skipped\"],\"\":{\"qlmfeoker\":\"datauoymgccelvezry\"}},{\"activity\":\"wkyhkobopgxe\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"vcdwxlpqekftn\":\"datapbqpcrfkbwccsn\",\"fq\":\"datahtjsying\",\"gszywk\":\"datatmtdhtmdvypgik\"}}],\"userProperties\":[{\"name\":\"ryuzh\
",\"value\":\"datahkjoqr\"},{\"name\":\"qqaatjinrvgou\",\"value\":\"datamfiibfggj\"},{\"name\":\"ool\",\"value\":\"datarwxkvtkkgl\"}],\"\":{\"hvkzuh\":\"datajygvjayvbl\",\"gsopbyrqufegxu\":\"dataxvvy\",\"bnhlmc\":\"datawz\",\"dn\":\"datal\"}},{\"type\":\"Activity\",\"name\":\"itvgbmhrixkwm\",\"description\":\"jejveg\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xexccbdreaxhcexd\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\",\"Succeeded\"],\"\":{\"jnhyjsvf\":\"dataghtpw\",\"mtg\":\"datacxzbfvoowvr\",\"y\":\"dataqp\"}},{\"activity\":\"s\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Succeeded\"],\"\":{\"ekrrjr\":\"datahgfipnsxkmcw\",\"jglikkxwslolb\":\"dataafxtsgum\",\"elfk\":\"datapvuzlmv\"}}],\"userProperties\":[{\"name\":\"lcrpw\",\"value\":\"dataxeznoi\"},{\"name\":\"brnjwmw\",\"value\":\"datapn\"},{\"name\":\"saz\",\"value\":\"datajjoqkagf\"},{\"name\":\"sxtta\",\"value\":\"datagzxnfaazpxdtnk\"}],\"\":{\"rkpyouaibrebqaay\":\"dataqjjlwuen\",\"ixqtn\":\"dataj\",\"ffiakp\":\"datattezlw\",\"tmmjihyeozph\":\"datapqqmted\"}}],\"parameters\":{\"mdscwxqupev\":{\"type\":\"Float\",\"defaultValue\":\"dataqncygupkvi\"},\"jujbypelmcuvhixb\":{\"type\":\"Float\",\"defaultValue\":\"datastotxh\"},\"yl\":{\"type\":\"Bool\",\"defaultValue\":\"datafw\"}},\"variables\":{\"iwkkbn\":{\"type\":\"Array\",\"defaultValue\":\"datasttp\"}},\"concurrency\":647346434,\"annotations\":[\"datavtylbfpncu\",\"datadoiwi\",\"datathtywub\",\"datacbihwqk\"],\"runDimensions\":{\"dgoihxumwctondzj\":\"datantwjch\",\"fdlwg\":\"datauu\",\"gseinq\":\"dataytsbwtovv\"},\"folder\":{\"name\":\"fxqknpirgneptt\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"dataniffcdmqnroj\"}}},\"name\":\"ijnkrxfrdd\",\"type\":\"ratiz\",\"etag\":\"onasxifto\",\"\":{\"tw\":\"datazh\",\"lgnyhmo\":\"datasgogczhonnxk\",\"h\":\"datasxkkg\",\"hqxvcxgfrpdsofbs\":\"datarghxjb\"},\"id\":\"nsvbuswdv\"}") - .toObject(PipelineResourceInner.class); - 
Assertions.assertEquals("nsvbuswdv", model.id()); - Assertions.assertEquals("dyodnwzxltj", model.description()); - Assertions.assertEquals("hlt", model.activities().get(0).name()); - Assertions.assertEquals("gcxn", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("byqunyow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lazszrn", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("mdscwxqupev").type()); - Assertions.assertEquals(VariableType.ARRAY, model.variables().get("iwkkbn").type()); - Assertions.assertEquals(647346434, model.concurrency()); - Assertions.assertEquals("fxqknpirgneptt", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineResourceInner model = new PipelineResourceInner().withId("nsvbuswdv") - .withDescription("dyodnwzxltj") - .withActivities(Arrays.asList( - new Activity().withName("hlt") - .withDescription("gcxn") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("byqunyow") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bjxbkzbzk") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ldgmfpgvmpip") - 
.withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("sibircgpi") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("lazszrn").withValue("dataoiindfpwpjy"), - new UserProperty().withName("wbtlhflsjcdh") - .withValue("datazfjvfbgofe"), - new UserProperty().withName("jagrqmqhldvr").withValue("dataiiojnal"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("wpjgwws") - .withDescription("ghftqsxhqxujxuk") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("jguufzdm") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hwyg") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("pfcqdp") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wkyhkobopgxe") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ryuzh").withValue("datahkjoqr"), - new UserProperty().withName("qqaatjinrvgou").withValue("datamfiibfggj"), - new UserProperty().withName("ool").withValue("datarwxkvtkkgl"))) - .withAdditionalProperties(mapOf("type", 
"Activity")), - new Activity().withName("itvgbmhrixkwm") - .withDescription("jejveg") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("xexccbdreaxhcexd") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("s") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("lcrpw").withValue("dataxeznoi"), - new UserProperty().withName("brnjwmw").withValue("datapn"), - new UserProperty().withName("saz").withValue("datajjoqkagf"), - new UserProperty().withName("sxtta").withValue("datagzxnfaazpxdtnk"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("mdscwxqupev", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataqncygupkvi"), - "jujbypelmcuvhixb", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datastotxh"), "yl", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datafw"))) - .withVariables( - mapOf("iwkkbn", new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datasttp"))) - .withConcurrency(647346434) - .withAnnotations(Arrays.asList("datavtylbfpncu", "datadoiwi", "datathtywub", "datacbihwqk")) - .withRunDimensions(mapOf("dgoihxumwctondzj", "datantwjch", "fdlwg", "datauu", "gseinq", "dataytsbwtovv")) - .withFolder(new PipelineFolder().withName("fxqknpirgneptt")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("dataniffcdmqnroj"))) - .withAdditionalProperties(mapOf("name", 
"ijnkrxfrdd", "etag", "onasxifto", "type", "ratiz")); - model = BinaryData.fromObject(model).toObject(PipelineResourceInner.class); - Assertions.assertEquals("nsvbuswdv", model.id()); - Assertions.assertEquals("dyodnwzxltj", model.description()); - Assertions.assertEquals("hlt", model.activities().get(0).name()); - Assertions.assertEquals("gcxn", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("byqunyow", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lazszrn", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("mdscwxqupev").type()); - Assertions.assertEquals(VariableType.ARRAY, model.variables().get("iwkkbn").type()); - Assertions.assertEquals(647346434, model.concurrency()); - Assertions.assertEquals("fxqknpirgneptt", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java deleted file mode 100644 index 79a5c6673ef3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInnerTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.core.util.serializer.JacksonAdapter; -import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.resourcemanager.datafactory.fluent.models.PipelineRunInner; -import java.util.HashMap; -import java.util.Map; - -public final class PipelineRunInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineRunInner model = BinaryData.fromString( - 
"{\"runId\":\"lqol\",\"runGroupId\":\"kcgxxlxsffgcvi\",\"isLatest\":true,\"pipelineName\":\"wlvwlyoupf\",\"parameters\":{\"ubdyhgk\":\"k\",\"tsttktlahbq\":\"minsgowzf\",\"mmqtgqqqxhr\":\"ctxtgzukxi\",\"juisavokqdzf\":\"xrxc\"},\"runDimensions\":{\"nwxyiop\":\"ivjlfrqttbajlka\"},\"invokedBy\":{\"name\":\"qqfkuv\",\"id\":\"xkdmligo\",\"invokedByType\":\"brxk\",\"pipelineName\":\"loazuruocbgoo\",\"pipelineRunId\":\"te\"},\"lastUpdated\":\"2021-05-01T19:42:34Z\",\"runStart\":\"2021-05-18T20:05:21Z\",\"runEnd\":\"2021-08-27T22:46:55Z\",\"durationInMs\":1900106080,\"status\":\"vjgsl\",\"message\":\"dilmyww\",\"\":{\"edabgyvudtjue\":\"datakxn\",\"yxccyb\":\"databcihxuuwhc\",\"px\":\"datapayakkud\"}}") - .toObject(PipelineRunInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineRunInner model = new PipelineRunInner().withAdditionalProperties(mapOf("durationInMs", 1900106080, - "runDimensions", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize("{\"nwxyiop\":\"ivjlfrqttbajlka\"}", Object.class, SerializerEncoding.JSON), - "invokedBy", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"name\":\"qqfkuv\",\"id\":\"xkdmligo\",\"invokedByType\":\"brxk\",\"pipelineName\":\"loazuruocbgoo\",\"pipelineRunId\":\"te\"}", - Object.class, SerializerEncoding.JSON), - "runStart", "2021-05-18T20:05:21Z", "message", "dilmyww", "pipelineName", "wlvwlyoupf", "lastUpdated", - "2021-05-01T19:42:34Z", "isLatest", true, "runId", "lqol", "runEnd", "2021-08-27T22:46:55Z", "runGroupId", - "kcgxxlxsffgcvi", "parameters", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"ubdyhgk\":\"k\",\"tsttktlahbq\":\"minsgowzf\",\"mmqtgqqqxhr\":\"ctxtgzukxi\",\"juisavokqdzf\":\"xrxc\"}", - Object.class, SerializerEncoding.JSON), - "status", "vjgsl")); - model = BinaryData.fromObject(model).toObject(PipelineRunInner.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private 
static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java deleted file mode 100644 index 91b33a973c70..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunInvokedByTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineRunInvokedBy; - -public final class PipelineRunInvokedByTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PipelineRunInvokedBy model = BinaryData.fromString( - "{\"name\":\"jplmagstcy\",\"id\":\"pfkyrkdbdgiogsj\",\"invokedByType\":\"nwqjnoba\",\"pipelineName\":\"hdd\",\"pipelineRunId\":\"acegfnmntf\"}") - .toObject(PipelineRunInvokedBy.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PipelineRunInvokedBy model = new PipelineRunInvokedBy(); - model = BinaryData.fromObject(model).toObject(PipelineRunInvokedBy.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java deleted file mode 100644 index c512bd8504bb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsCancelWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelineRunsCancelWithResponseMockTests { - @Test - public void testCancelWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.pipelineRuns() - .cancelWithResponse("yp", "vyvobkkek", "dxclqjnnhotwqk", false, com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java deleted file mode 100644 index 986696003f27..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineRunsGetWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.PipelineRun; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelineRunsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"runId\":\"cov\",\"runGroupId\":\"cobfnbdpao\",\"isLatest\":false,\"pipelineName\":\"gfmftrv\",\"parameters\":{\"xwnircmodws\":\"jfkpuszsjayrl\",\"wxwkjambfsxsr\":\"hzlamdqgav\",\"yezwjqb\":\"jfapiodsn\"},\"runDimensions\":{\"hvcorobmqu\":\"lgeuxoymjxqint\"},\"invokedBy\":{\"name\":\"pzk\",\"id\":\"ea\",\"invokedByType\":\"cygyqgvof\",\"pipelineName\":\"guj\",\"pipelineRunId\":\"kwwyfsq\"},\"lastUpdated\":\"2021-11-04T14:17:32Z\",\"runStart\":\"2021-05-15T21:49:42Z\",\"runEnd\":\"2021-08-02T18:51:40Z\",\"durationInMs\":1811088788,\"status\":\"rmuls\",\"message\":\"gohpwnrmhlotknbr\",\"\":{\"xmdmlowesixpw\":\"dataqmudmef\",\"fkdvb\":\"datavtwgnmeqirxwkomj\",\"inj\":\"datacj\",\"zuaxsrmadakj\":\"datawpivfplbajqecngw\"}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PipelineRun response = manager.pipelineRuns() - .getWithResponse("xrrjudgnphgsdq", "wfmvpsvwwtncvnoq", "gnl", com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java deleted file mode 100644 index 4340f829bb82..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelineTests.java +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.Pipeline; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; -import com.azure.resourcemanager.datafactory.models.PipelineFolder; -import com.azure.resourcemanager.datafactory.models.PipelinePolicy; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PipelineTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Pipeline model = BinaryData.fromString( - 
"{\"description\":\"ybycnunvj\",\"activities\":[{\"type\":\"Activity\",\"name\":\"kfawnopqgikyz\",\"description\":\"txdyuxzejntpsew\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"kr\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"tbghhavgrvkf\":\"dataeoxorggufhyao\",\"mv\":\"dataovjzhpjbibgjmfx\",\"zzxscyhwzdgiruj\":\"datacluyovwxnbkf\",\"ujviylwdshfs\":\"datazbomvzzbtdcqvpni\"}}],\"userProperties\":[{\"name\":\"bgye\",\"value\":\"datarymsgaojfmw\"},{\"name\":\"cotmr\",\"value\":\"datahirctymoxoftpipi\"}],\"\":{\"cpqjlihhyu\":\"datazuhx\",\"x\":\"datapskasdvlmfwdg\",\"sreuzvxurisjnh\":\"datalucvpam\",\"blwpcesutrgj\":\"dataytxifqjzgxmrh\"}}],\"parameters\":{\"w\":{\"type\":\"Float\",\"defaultValue\":\"datatpwoqhihejq\"},\"xjvfoimwksl\":{\"type\":\"SecureString\",\"defaultValue\":\"datafqntcyp\"},\"ydfce\":{\"type\":\"String\",\"defaultValue\":\"dataizjx\"}},\"variables\":{\"mrtwna\":{\"type\":\"String\",\"defaultValue\":\"datavygdyft\"}},\"concurrency\":951831262,\"annotations\":[\"dataiw\",\"dataojgcyzt\",\"datafmznba\"],\"runDimensions\":{\"huwrykqgaifm\":\"datahchqnrnrpx\"},\"folder\":{\"name\":\"lb\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datahbejdznxcvdsrhnj\"}}}") - .toObject(Pipeline.class); - Assertions.assertEquals("ybycnunvj", model.description()); - Assertions.assertEquals("kfawnopqgikyz", model.activities().get(0).name()); - Assertions.assertEquals("txdyuxzejntpsew", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("kr", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bgye", 
model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("w").type()); - Assertions.assertEquals(VariableType.STRING, model.variables().get("mrtwna").type()); - Assertions.assertEquals(951831262, model.concurrency()); - Assertions.assertEquals("lb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Pipeline model = new Pipeline().withDescription("ybycnunvj") - .withActivities(Arrays.asList(new Activity().withName("kfawnopqgikyz") - .withDescription("txdyuxzejntpsew") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("kr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("bgye").withValue("datarymsgaojfmw"), - new UserProperty().withName("cotmr").withValue("datahirctymoxoftpipi"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("w", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datatpwoqhihejq"), - "xjvfoimwksl", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datafqntcyp"), - "ydfce", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataizjx"))) - .withVariables(mapOf("mrtwna", - new VariableSpecification().withType(VariableType.STRING).withDefaultValue("datavygdyft"))) - .withConcurrency(951831262) - .withAnnotations(Arrays.asList("dataiw", "dataojgcyzt", "datafmznba")) - .withRunDimensions(mapOf("huwrykqgaifm", "datahchqnrnrpx")) - .withFolder(new PipelineFolder().withName("lb")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new 
PipelineElapsedTimeMetricPolicy().withDuration("datahbejdznxcvdsrhnj"))); - model = BinaryData.fromObject(model).toObject(Pipeline.class); - Assertions.assertEquals("ybycnunvj", model.description()); - Assertions.assertEquals("kfawnopqgikyz", model.activities().get(0).name()); - Assertions.assertEquals("txdyuxzejntpsew", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("kr", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bgye", model.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("w").type()); - Assertions.assertEquals(VariableType.STRING, model.variables().get("mrtwna").type()); - Assertions.assertEquals(951831262, model.concurrency()); - Assertions.assertEquals("lb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 590a52e8159a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy; -import com.azure.resourcemanager.datafactory.models.PipelineFolder; -import com.azure.resourcemanager.datafactory.models.PipelinePolicy; -import 
com.azure.resourcemanager.datafactory.models.PipelineResource; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelinesCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"description\":\"fulvmvalvcahy\",\"activities\":[{\"type\":\"Activity\",\"name\":\"dht\",\"description\":\"p\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"w\",\"dependencyConditions\":[]},{\"activity\":\"cghxmeigkvm\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"azkuemotgkyf\",\"value\":\"datadx\"},{\"name\":\"wqkfsv\",\"value\":\"dataczisiqns\"},{\"name\":\"vwjfuhqbatdn\",\"value\":\"datafvzxosrstevdts\"},{\"name\":\"aubmdojimfaa\",\"value\":\"datapjslrfpxlutfbhs\"}],\"\":{\"boyqescvcv\":\"datafcbxtabxd\",\"jppmil\":\"datatarur\"}},{\"type\":\"Activity\",\"name\":\"ikqz\",\"description\":\"kxjcqdnzhj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"vipjinji\",\"dependencyConditions\":[]},{\"activity\":\"xocfmkcnjzxezon\",\"dependencyConditions\":[]},{\"activity\":\"ltewthslztxix\",\"dependencyConditions\":[]},{\"activity\":\"gweuxyc\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fldfwqnbco\",\"value\":\"datanlscfbwkhle\"},{\"name\":\"mibo\",\"value\":\"datargxdcnbzpcxoqum\"},{\"name\":\"pzekm\",\"value\":\"datapdvnanxrkwzlaomt\"}],\"\":{\"xyfjeibcge\":\"datattmhsrwqp\",\"mr\":\"dataipoequjkhu\",\"hinmhccwmrckvlbc\":\"dataxcbptvvwfamhl\"}
},{\"type\":\"Activity\",\"name\":\"wdgydbsrj\",\"description\":\"xoktokmsyo\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"ttcmwqrbtad\",\"dependencyConditions\":[]},{\"activity\":\"dkbndkofrhuycn\",\"dependencyConditions\":[]},{\"activity\":\"yhodtugrw\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fkgzgveud\",\"value\":\"dataidtnsqtrtcacdomz\"}],\"\":{\"ox\":\"datajt\",\"a\":\"datazt\",\"nssghafzdzdf\":\"dataicznotggy\"}}],\"parameters\":{\"tiqzjrxhelqh\":{\"type\":\"String\",\"defaultValue\":\"datautzuriqlksbayy\"}},\"variables\":{\"wjzqmbe\":{\"type\":\"String\",\"defaultValue\":\"datadalstetgymd\"},\"rdxquowe\":{\"type\":\"Bool\",\"defaultValue\":\"datadnkgrxhpxsbhua\"},\"qwdme\":{\"type\":\"String\",\"defaultValue\":\"dataxzduydnvvwoclmdc\"},\"wuwrtubemptxmue\":{\"type\":\"Array\",\"defaultValue\":\"datajeuguvnwcvlmy\"}},\"concurrency\":369979478,\"annotations\":[\"datammwpqcdmfrjqfe\",\"datagdkxiprrvfy\",\"datavkmom\"],\"runDimensions\":{\"jutupgmmtit\":\"dataevqbcdjlnnvhb\",\"ybgxxm\":\"datapoqqakpbkwqavxlj\",\"boiyqi\":\"datadrgxhrtans\"},\"folder\":{\"name\":\"jhvkttusyxz\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datayoqjttriviftjjmt\"}}},\"name\":\"gdgfjvitdp\",\"type\":\"oesx\",\"etag\":\"vslhncasp\",\"\":{\"ir\":\"datalaxvndqhatwxqagg\",\"lmdhuu\":\"datajhaicyu\"},\"id\":\"iecn\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PipelineResource response = manager.pipelines() - .define("xtzy") - .withExistingFactory("fuovkgqtzg", "t") - .withDescription("qmp") - .withActivities(Arrays.asList( - new Activity().withName("iioa") - 
.withDescription("ykthxudowjw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("dcohsqufsyihsnz") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("xuogyakexjzalhu").withValue("databm"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("oh") - .withDescription("xliojods") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("qlrwwmukxk") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("sf").withValue("datakjfrtaufrxxvz"), - new UserProperty().withName("ineqmjodvknxjt").withValue("datatk"), - new UserProperty().withName("mhquc").withValue("datas"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("gafznzemis") - .withDescription("nxwosanch") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("tdeumlf") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zxqr") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("b") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("nkeodgpqdcrnubnt") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("htuiws").withValue("dataccmun"), - new UserProperty().withName("vw").withValue("datasgczvuiprngne"), - new 
UserProperty().withName("m").withValue("datazdayzfu"), - new UserProperty().withName("bnelmi").withValue("datamccevbpr"))) - .withAdditionalProperties(mapOf("type", "Activity")))) - .withParameters(mapOf("gowkazmwrxsfejkr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datawg"), - "gfewflxby", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datafjnozityqqoswksh"), - "tmeendocqaptwkbi", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataufxxvsyl"), "z", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datanwhazalftta"))) - .withVariables(mapOf("xzhbfibzvxqh", - new VariableSpecification().withType(VariableType.BOOL).withDefaultValue("datasrduqhrlltfec"), - "vdsydjrhxjawfujv", - new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("datadbzhlc"))) - .withConcurrency(1085443779) - .withAnnotations(Arrays.asList("dataaroohppupucy")) - .withRunDimensions(mapOf("gxexeaexweeifogv", "dataavelcbmmrh", "omgvgarx", "datamqdnfonncnfjygg", - "mjygnixkpadjqjwl", "datar", "n", "dataqeibucmfvuizjrs")) - .withFolder(new PipelineFolder().withName("ezxldmz")) - .withPolicy(new PipelinePolicy() - .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("datawydsv"))) - .withIfMatch("fnxdi") - .create(); - - Assertions.assertEquals("iecn", response.id()); - Assertions.assertEquals("fulvmvalvcahy", response.description()); - Assertions.assertEquals("dht", response.activities().get(0).name()); - Assertions.assertEquals("p", response.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, response.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, response.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("w", response.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("azkuemotgkyf", 
response.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.STRING, response.parameters().get("tiqzjrxhelqh").type()); - Assertions.assertEquals(VariableType.STRING, response.variables().get("wjzqmbe").type()); - Assertions.assertEquals(369979478, response.concurrency()); - Assertions.assertEquals("jhvkttusyxz", response.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java deleted file mode 100644 index bcf4c133cdb1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesCreateRunWithResponseMockTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.CreateRunResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelinesCreateRunWithResponseMockTests { - @Test - public void testCreateRunWithResponse() throws Exception { - String responseStr = "{\"runId\":\"dhzltmywy\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - CreateRunResponse response = manager.pipelines() - .createRunWithResponse("nnqii", "hpxxwbetmqugov", "ddxlrbs", "qrgjejabqvg", false, "iqyazpxlyabj", false, - mapOf("kuxphbwmbgwgmyg", "datassjfwur", "z", "datansnkylqdsyg", "ewqwdglmfsjpl", "dataufr"), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("dhzltmywy", response.runId()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java deleted file mode 100644 index f4488a014f3b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelinesDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - 
- manager.pipelines() - .deleteWithResponse("vbvicwfrybvhg", "ltjghdfusphokcc", "ynnm", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java deleted file mode 100644 index 918192862a30..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesGetWithResponseMockTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineResource; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelinesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"description\":\"ktlofgpnswvcsekw\",\"activities\":[{\"type\":\"Activity\",\"name\":\"fpoqbekkqsaby\",\"description\":\"frwp\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ncyflgtqrow\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"qexwkkj\",\"value\":\"datacj\"},{\"name\":\"mnkeaia\",\"value\":\"datahzj\"},{\"name\":\"udrclzro\",\"value\":\"dataxirtt\"}],\"\":{\"ee\":\"dataqgaajbfkdflqsbek\",\"ebtvnskyg\":\"datawdojpjaq\"}}],\"parameters\":{\"zh\":{\"type\":\"String\",\"defaultValue\":\"datatsy\"},\"nykf\":{\"type\":\"Object\",\"defaultValue\":\"datacbzi\"},\"eltnevbkkdbhgurn\":{\"type\":\"SecureString\",\"defaultValue\":\"datamdqghtb\"}},\"variables\":{\"lieyyfqhndj\":{\"type\":\"String\",\"defaultValue\":\"datarj\"},\"fhsgpy\":{\"type\":\"Array\",\"defaultValue\":\"datavuyxccraj\"}},\"concurrency\":1496349843,\"annotations\":[\"datad\"],\"runDimensions\":{\"oiufrqsmjgddbunx\":\"datalpsjbnnuqszy\",\"s\":\"datafata\"},\"folder\":{\"name\":\"gdwhacurmmbuna\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datavnyzcn\"}}},\"name\":\"isuhareqyiadvv\",\"type\":\"dfyelpnlp\",\"etag\":\"yuxcjqyfx\",\"\":{\"ygecly\":\"datatukossiflfv\"},\"id\":\"oshkzibbjbzdnkg\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PipelineResource response = manager.pipelines() - .getWithResponse("cmhttiqbn", "yixkeavbezz", "flddvvcwhod", "wvzxrfrax", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("oshkzibbjbzdnkg", response.id()); - Assertions.assertEquals("ktlofgpnswvcsekw", response.description()); - Assertions.assertEquals("fpoqbekkqsaby", 
response.activities().get(0).name()); - Assertions.assertEquals("frwp", response.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, response.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, response.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("ncyflgtqrow", response.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("qexwkkj", response.activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.STRING, response.parameters().get("zh").type()); - Assertions.assertEquals(VariableType.STRING, response.variables().get("lieyyfqhndj").type()); - Assertions.assertEquals(1496349843, response.concurrency()); - Assertions.assertEquals("gdwhacurmmbuna", response.folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java deleted file mode 100644 index 028dad447850..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PipelinesListByFactoryMockTests.java +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PipelineResource; -import com.azure.resourcemanager.datafactory.models.VariableType; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PipelinesListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"properties\":{\"description\":\"ipato\",\"activities\":[{\"type\":\"Activity\",\"name\":\"r\",\"description\":\"pmcjrfj\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"fptynhulefltu\",\"dependencyConditions\":[]},{\"activity\":\"wpebbl\",\"dependencyConditions\":[]},{\"activity\":\"dlahrd\",\"dependencyConditions\":[]},{\"activity\":\"ytkehfoephiphoyg\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"q\",\"value\":\"dataoukimvhqis\"},{\"name\":\"vologfxbvl\",\"value\":\"datacf\"}],\"\":{\"qnh\":\"datajnce\",\"i\":\"datagbm\",\"auohtn\":\"datade\"}},{\"type\":\"Activity\",\"name\":\"tahdtdceuhjxv\",\"description\":\"rx\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"rbgcprsd\",\"dependencyConditions\":[]},{\"activity\":\"swozpm\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"nxwkfesursby\",\"value\":\"dataoavozqnnlmxitvmr\"},{\"name\":\"bkzchc\",\"value\":\"datauvskdvqyfubwxc\"}],\"\":{\"md\":\"datarvjpfojhv\"}}],\"parameters\":{\"suwghtgpgarh\":{\"type\":\"Int\",\"defaultValue\":\"datazhundfkpdxfvjdf\"},\"adp\":{\"type\":\"Int\",\"defaultValue\":\"datadedi\"}},\"variables\":{\"eakv\":{\"type\":\"String\",\"defaultValue\":\"datamwqsdzflexkfsgr\"},\"hfzriigte\":{\"type\":\"String\",\"defaultValue\":\"datakmnuivpbjcl\"}},\"concurrency\":203718836,\"annotations\":[\"datajqjoamzdsajn\",\"datatkqbvtdeouqixgtp\",\"datakbjev\"],\"runDimensions\":{\"chh\":\"datauwdvfaulbf\",\"phxhvbfekxbcbu\":\"datacobp\",\"md\":\"datajysukezqohth\",\"uypo\":\"dataaoypryu\"},\"folder\":{\"name\":\"dclajujs\"},\"policy\":{\"elapsedTimeMetric\":{\"duration\":\"datareyrgrgft\"}}},\"name\":\"hxddmaevcjtr\",\"type\":\"cnwqeixyjlfobj\",\"etag\":\"et\",\"\":{\"feolhs\":\"dataloduvcqowcg\",\"nsiynzdadku\":\"dataskivlzvxmqvlgcp\",\"xollcsdgmcj\":\"datawgtypnj\"},\"id\":\"te\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, 
responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.pipelines().listByFactory("mxidj", "ptruiegrauyphugw", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("te", response.iterator().next().id()); - Assertions.assertEquals("ipato", response.iterator().next().description()); - Assertions.assertEquals("r", response.iterator().next().activities().get(0).name()); - Assertions.assertEquals("pmcjrfj", response.iterator().next().activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, response.iterator().next().activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, - response.iterator().next().activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("fptynhulefltu", - response.iterator().next().activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("q", response.iterator().next().activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals(ParameterType.INT, response.iterator().next().parameters().get("suwghtgpgarh").type()); - Assertions.assertEquals(VariableType.STRING, response.iterator().next().variables().get("eakv").type()); - Assertions.assertEquals(203718836, response.iterator().next().concurrency()); - Assertions.assertEquals("dclajujs", response.iterator().next().folder().name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java deleted file mode 100644 index 033057aa59d2..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PolybaseSettingsTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PolybaseSettings; -import com.azure.resourcemanager.datafactory.models.PolybaseSettingsRejectType; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PolybaseSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PolybaseSettings model = BinaryData.fromString( - "{\"rejectType\":\"percentage\",\"rejectValue\":\"dataxydsompncearkj\",\"rejectSampleValue\":\"datapa\",\"useTypeDefault\":\"datak\",\"\":{\"anm\":\"dataakgrwtpes\"}}") - .toObject(PolybaseSettings.class); - Assertions.assertEquals(PolybaseSettingsRejectType.PERCENTAGE, model.rejectType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PolybaseSettings model = new PolybaseSettings().withRejectType(PolybaseSettingsRejectType.PERCENTAGE) - .withRejectValue("dataxydsompncearkj") - .withRejectSampleValue("datapa") - .withUseTypeDefault("datak") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(PolybaseSettings.class); - Assertions.assertEquals(PolybaseSettingsRejectType.PERCENTAGE, model.rejectType()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java deleted file mode 100644 index 0a87a3195478..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PostgreSqlSource; - -public final class PostgreSqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlSource model = BinaryData.fromString( - "{\"type\":\"PostgreSqlSource\",\"query\":\"dataljlwfqrfyw\",\"queryTimeout\":\"datasipkhqh\",\"additionalColumns\":\"datatcztmqdkhohspkgx\",\"sourceRetryCount\":\"datavlyaprjzbx\",\"sourceRetryWait\":\"dataqfrntzbhmxl\",\"maxConcurrentConnections\":\"datafauvgt\",\"disableMetricsCollection\":\"datanozsmy\",\"\":{\"bmrwhknefcoo\":\"dataap\",\"pdd\":\"datatmd\",\"laxuybxjwny\":\"datagupiosibg\",\"fiksjpkig\":\"dataskyrttnrikss\"}}") - .toObject(PostgreSqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlSource model = new PostgreSqlSource().withSourceRetryCount("datavlyaprjzbx") - .withSourceRetryWait("dataqfrntzbhmxl") - .withMaxConcurrentConnections("datafauvgt") - 
.withDisableMetricsCollection("datanozsmy") - .withQueryTimeout("datasipkhqh") - .withAdditionalColumns("datatcztmqdkhohspkgx") - .withQuery("dataljlwfqrfyw"); - model = BinaryData.fromObject(model).toObject(PostgreSqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java deleted file mode 100644 index 603f0f38cecb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PostgreSqlTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PostgreSqlTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlTableDataset model = BinaryData.fromString( - 
"{\"type\":\"PostgreSqlTable\",\"typeProperties\":{\"tableName\":\"dataxlhmpmhef\",\"table\":\"dataaphqeofytl\",\"schema\":\"datalowmcm\"},\"description\":\"ixuanccqvjfdgfq\",\"structure\":\"dataquxpjhcfaara\",\"schema\":\"dataiovmufzgugrblwal\",\"linkedServiceName\":{\"referenceName\":\"ossnq\",\"parameters\":{\"sman\":\"dataotbptg\",\"ecvtamqwzmno\":\"dataxrwqfmd\",\"wpsibxovuqoq\":\"datafe\"}},\"parameters\":{\"cvtqnzjcyqqzhemb\":{\"type\":\"Bool\",\"defaultValue\":\"datandyclwg\"},\"isj\":{\"type\":\"String\",\"defaultValue\":\"datanalbad\"}},\"annotations\":[\"dataajvmvvlooubsfxip\",\"dataeopsk\",\"dataocjomlup\",\"dataazusjcd\"],\"folder\":{\"name\":\"lgdwzrgdqyx\"},\"\":{\"cwwsj\":\"datalgrcavqcwyzoqzkm\",\"qxilefejs\":\"datakiixepbntqqwwgfg\"}}") - .toObject(PostgreSqlTableDataset.class); - Assertions.assertEquals("ixuanccqvjfdgfq", model.description()); - Assertions.assertEquals("ossnq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("cvtqnzjcyqqzhemb").type()); - Assertions.assertEquals("lgdwzrgdqyx", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlTableDataset model = new PostgreSqlTableDataset().withDescription("ixuanccqvjfdgfq") - .withStructure("dataquxpjhcfaara") - .withSchema("dataiovmufzgugrblwal") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ossnq") - .withParameters(mapOf("sman", "dataotbptg", "ecvtamqwzmno", "dataxrwqfmd", "wpsibxovuqoq", "datafe"))) - .withParameters(mapOf("cvtqnzjcyqqzhemb", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datandyclwg"), "isj", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datanalbad"))) - .withAnnotations(Arrays.asList("dataajvmvvlooubsfxip", "dataeopsk", "dataocjomlup", "dataazusjcd")) - .withFolder(new DatasetFolder().withName("lgdwzrgdqyx")) - .withTableName("dataxlhmpmhef") - 
.withTable("dataaphqeofytl") - .withSchemaTypePropertiesSchema("datalowmcm"); - model = BinaryData.fromObject(model).toObject(PostgreSqlTableDataset.class); - Assertions.assertEquals("ixuanccqvjfdgfq", model.description()); - Assertions.assertEquals("ossnq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("cvtqnzjcyqqzhemb").type()); - Assertions.assertEquals("lgdwzrgdqyx", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java deleted file mode 100644 index b8070227abfe..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlTableDatasetTypeProperties; - -public final class PostgreSqlTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlTableDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"datarznequqyntt\",\"table\":\"datanhajksbsyogjmq\",\"schema\":\"datagcydijnmcv\"}") - .toObject(PostgreSqlTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlTableDatasetTypeProperties model - = new PostgreSqlTableDatasetTypeProperties().withTableName("datarznequqyntt") - .withTable("datanhajksbsyogjmq") - .withSchema("datagcydijnmcv"); - model = BinaryData.fromObject(model).toObject(PostgreSqlTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java deleted file mode 100644 index f612bf16b664..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2SourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PostgreSqlV2Source; - -public final class PostgreSqlV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlV2Source model = BinaryData.fromString( - "{\"type\":\"PostgreSqlV2Source\",\"query\":\"datatoqtui\",\"queryTimeout\":\"datapbfsxps\",\"additionalColumns\":\"dataevz\",\"sourceRetryCount\":\"dataydnokkkgiec\",\"sourceRetryWait\":\"dataftsneftzetjclaq\",\"maxConcurrentConnections\":\"datab\",\"disableMetricsCollection\":\"datamagpdsuyywnaqgoo\",\"\":{\"hufsgc\":\"datahvgddfzcnylzw\",\"gtdeg\":\"datawr\",\"otjj\":\"datahofoptbiuik\",\"snr\":\"dataecxvkqjpovjvvx\"}}") - .toObject(PostgreSqlV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlV2Source model = new PostgreSqlV2Source().withSourceRetryCount("dataydnokkkgiec") - .withSourceRetryWait("dataftsneftzetjclaq") - .withMaxConcurrentConnections("datab") - .withDisableMetricsCollection("datamagpdsuyywnaqgoo") - .withQueryTimeout("datapbfsxps") - .withAdditionalColumns("dataevz") - .withQuery("datatoqtui"); - model = BinaryData.fromObject(model).toObject(PostgreSqlV2Source.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java deleted file mode 100644 index 1ce8bbbd9292..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PostgreSqlV2TableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PostgreSqlV2TableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlV2TableDataset model = BinaryData.fromString( - "{\"type\":\"PostgreSqlV2Table\",\"typeProperties\":{\"table\":\"datasfcriqxzix\",\"schema\":\"datalxwmvcdkucpx\"},\"description\":\"afr\",\"structure\":\"datagorogeuvmk\",\"schema\":\"datapnr\",\"linkedServiceName\":{\"referenceName\":\"jseml\",\"parameters\":{\"tikelpmwgr\":\"datarsnqpljpetex\",\"udhvos\":\"dataub\",\"gq\":\"datajzscuezalivdf\"}},\"parameters\":{\"rxggezkhzp\":{\"type\":\"Array\",\"defaultValue\":\"dataf\"},\"yzaivnpsjnpckpl\":{\"type\":\"String\",\"defaultValue\":\"dataissenerru\"},\"pl\":{\"type\":\"Float\",\"defaultValue\":\"dataduonbdawsa\"}},\"annotations\":[\"datanbkxjarsbb\",\"dataddwokqxail\",\"dataqk\",\"datayqjvzvc\"],\"folder\":{\"name\":\"spzesfkqqxuhvz\"},\"\":{\"j\":\"datarouszxacdwukokgo\",\"ubcmun\":\"datafuk\",\"fkrfnkcni\":\"datagbtzvxxvsbcu\"}}") - .toObject(PostgreSqlV2TableDataset.class); - Assertions.assertEquals("afr", model.description()); - Assertions.assertEquals("jseml", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rxggezkhzp").type()); - Assertions.assertEquals("spzesfkqqxuhvz", model.folder().name()); - } - - 
@org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlV2TableDataset model = new PostgreSqlV2TableDataset().withDescription("afr") - .withStructure("datagorogeuvmk") - .withSchema("datapnr") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("jseml") - .withParameters(mapOf("tikelpmwgr", "datarsnqpljpetex", "udhvos", "dataub", "gq", "datajzscuezalivdf"))) - .withParameters(mapOf("rxggezkhzp", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("dataf"), "yzaivnpsjnpckpl", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataissenerru"), "pl", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataduonbdawsa"))) - .withAnnotations(Arrays.asList("datanbkxjarsbb", "dataddwokqxail", "dataqk", "datayqjvzvc")) - .withFolder(new DatasetFolder().withName("spzesfkqqxuhvz")) - .withTable("datasfcriqxzix") - .withSchemaTypePropertiesSchema("datalxwmvcdkucpx"); - model = BinaryData.fromObject(model).toObject(PostgreSqlV2TableDataset.class); - Assertions.assertEquals("afr", model.description()); - Assertions.assertEquals("jseml", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("rxggezkhzp").type()); - Assertions.assertEquals("spzesfkqqxuhvz", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java deleted file mode 100644 index f6f06acabfc2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PostgreSqlV2TableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PostgreSqlV2TableDatasetTypeProperties; - -public final class PostgreSqlV2TableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PostgreSqlV2TableDatasetTypeProperties model - = BinaryData.fromString("{\"table\":\"dataswxmfurqm\",\"schema\":\"datawwp\"}") - .toObject(PostgreSqlV2TableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PostgreSqlV2TableDatasetTypeProperties model - = new PostgreSqlV2TableDatasetTypeProperties().withTable("dataswxmfurqm").withSchema("datawwp"); - model = BinaryData.fromObject(model).toObject(PostgreSqlV2TableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java deleted file mode 100644 index 0960d45f8654..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkMappingTests.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PowerQuerySink; -import com.azure.resourcemanager.datafactory.models.PowerQuerySinkMapping; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PowerQuerySinkMappingTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PowerQuerySinkMapping model = BinaryData.fromString( - 
"{\"queryName\":\"esbjohrvkpnmaa\",\"dataflowSinks\":[{\"script\":\"aiegrxooqq\",\"schemaLinkedService\":{\"referenceName\":\"mbuzkay\",\"parameters\":{\"yfgjxmgwk\":\"dataycogwj\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"lnosql\",\"parameters\":{\"zbvkoxlj\":\"datavwauqxhqcvai\",\"gtziyl\":\"datavefbio\",\"lkzzlokmrudepzl\":\"dataxoaallveezesdn\",\"vgpvhtx\":\"datauzcwlbefjh\"}},\"name\":\"tah\",\"description\":\"jgfp\",\"dataset\":{\"referenceName\":\"fmgudkfoybih\",\"parameters\":{\"wmddvfubr\":\"datakwtatkkufb\",\"rnggwujyukjfsb\":\"dataomff\"}},\"linkedService\":{\"referenceName\":\"gb\",\"parameters\":{\"gkbf\":\"dataihxwfmuwhgx\",\"avqqxtyhdik\":\"dataqsislaubij\",\"ryi\":\"dataratzgxta\",\"ovu\":\"datalfb\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"pichsbzgw\",\"datasetParameters\":\"dataarbjjswzkzwtfeg\",\"parameters\":{\"zl\":\"datawtagfebuqudewj\",\"cddwmnsapg\":\"datamuuljnang\"},\"\":{\"grzntpqvhkjbgcqq\":\"datapajr\"}}},{\"script\":\"tmyzoiqaijnahelf\",\"schemaLinkedService\":{\"referenceName\":\"ayplakqgljcr\",\"parameters\":{\"jbsfpaomlg\":\"dataxzeki\",\"pypsjokjjrj\":\"datanazefti\",\"jt\":\"datauql\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"tcjimsge\",\"parameters\":{\"nijcxvqjwli\":\"datawwedbcrkepjnyrtl\",\"vnbk\":\"datapqlna\"}},\"name\":\"qassnwvwluzs\",\"description\":\"qxbtkwatwvzj\",\"dataset\":{\"referenceName\":\"dn\",\"parameters\":{\"nrwua\":\"datawgrqiqlopbjxokm\"}},\"linkedService\":{\"referenceName\":\"jhoshin\",\"parameters\":{\"rgvcoulxh\":\"datauqepl\",\"eumyxpsovypwvhj\":\"datavurs\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"gsfmgypqmcal\",\"datasetParameters\":\"datazmix\",\"parameters\":{\"inzukzb\":\"datarhjfsehtzjbuz\",\"dhleuabsiqnyjjfj\":\"databc\",\"up\":\"datags\",\"jmbzph\":\"datanxexafql\"},\"\":{\"ltddif\":\"datajsgbcr\",\"afbijaqyiyefle\":\"datawxe\"}}},{\"script\":\"fackfupyivq\",\"schemaLinkedService\":{\"referenceName\":\"zxyzlxo\",\"parameters
\":{\"rdfzynfmyjqrn\":\"datatkbperkeyhybcsx\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"gmntzeauifcz\",\"parameters\":{\"tztfnjpwbxannjrx\":\"datatfvzdobhejjb\",\"qwlbxny\":\"datathqjvoydeg\",\"azfrsm\":\"datakppnzaluafx\",\"rollntvfqrjfzfhs\":\"datagvfmbs\"}},\"name\":\"dsraxztain\",\"description\":\"xkzcfxzcp\",\"dataset\":{\"referenceName\":\"pqpwefzlreonsq\",\"parameters\":{\"mgra\":\"datagnfd\",\"lssqv\":\"datamftziracz\",\"njswnjoni\":\"dataghznltjxstjge\"}},\"linkedService\":{\"referenceName\":\"qwxsxswineyjerf\",\"parameters\":{\"smgsftk\":\"datalppnmrftnf\",\"gk\":\"datap\",\"zeemsfpmoiykts\":\"datafbfn\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"nikmwnzcenk\",\"datasetParameters\":\"dataqlvqkkceb\",\"parameters\":{\"shoeqpv\":\"datacuj\",\"mlkvaiolfrce\":\"datakpgi\",\"ajksmwrbw\":\"datacvsreicpsv\"},\"\":{\"qskkpoufupadtpb\":\"dataoccrla\",\"evdzvnyzh\":\"dataz\",\"agkndnenozsfln\":\"datat\"}}}]}") - .toObject(PowerQuerySinkMapping.class); - Assertions.assertEquals("esbjohrvkpnmaa", model.queryName()); - Assertions.assertEquals("tah", model.dataflowSinks().get(0).name()); - Assertions.assertEquals("jgfp", model.dataflowSinks().get(0).description()); - Assertions.assertEquals("fmgudkfoybih", model.dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("gb", model.dataflowSinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("pichsbzgw", model.dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("mbuzkay", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("lnosql", model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("aiegrxooqq", model.dataflowSinks().get(0).script()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws 
Exception { - PowerQuerySinkMapping model - = new PowerQuerySinkMapping().withQueryName("esbjohrvkpnmaa") - .withDataflowSinks( - Arrays - .asList( - new PowerQuerySink().withName("tah") - .withDescription("jgfp") - .withDataset(new DatasetReference().withReferenceName("fmgudkfoybih") - .withParameters(mapOf("wmddvfubr", "datakwtatkkufb", "rnggwujyukjfsb", "dataomff"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("gb") - .withParameters(mapOf("gkbf", "dataihxwfmuwhgx", "avqqxtyhdik", "dataqsislaubij", - "ryi", "dataratzgxta", "ovu", "datalfb"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("pichsbzgw") - .withDatasetParameters("dataarbjjswzkzwtfeg") - .withParameters(mapOf("zl", "datawtagfebuqudewj", "cddwmnsapg", "datamuuljnang")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("mbuzkay") - .withParameters(mapOf("yfgjxmgwk", "dataycogwj"))) - .withRejectedDataLinkedService(new LinkedServiceReference().withReferenceName("lnosql") - .withParameters(mapOf("zbvkoxlj", "datavwauqxhqcvai", "gtziyl", "datavefbio", - "lkzzlokmrudepzl", "dataxoaallveezesdn", "vgpvhtx", "datauzcwlbefjh"))) - .withScript("aiegrxooqq"), - new PowerQuerySink().withName("qassnwvwluzs") - .withDescription("qxbtkwatwvzj") - .withDataset(new DatasetReference().withReferenceName("dn") - .withParameters(mapOf("nrwua", "datawgrqiqlopbjxokm"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("jhoshin") - .withParameters(mapOf("rgvcoulxh", "datauqepl", "eumyxpsovypwvhj", "datavurs"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("gsfmgypqmcal") - .withDatasetParameters("datazmix") - .withParameters(mapOf("inzukzb", "datarhjfsehtzjbuz", "dhleuabsiqnyjjfj", "databc", - "up", "datags", "jmbzph", "datanxexafql")) - .withAdditionalProperties(mapOf())) - 
.withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ayplakqgljcr") - .withParameters(mapOf("jbsfpaomlg", "dataxzeki", "pypsjokjjrj", "datanazefti", "jt", - "datauql"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("tcjimsge") - .withParameters( - mapOf("nijcxvqjwli", "datawwedbcrkepjnyrtl", "vnbk", "datapqlna"))) - .withScript("tmyzoiqaijnahelf"), - new PowerQuerySink().withName("dsraxztain") - .withDescription("xkzcfxzcp") - .withDataset(new DatasetReference().withReferenceName("pqpwefzlreonsq") - .withParameters(mapOf("mgra", "datagnfd", "lssqv", "datamftziracz", "njswnjoni", - "dataghznltjxstjge"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("qwxsxswineyjerf") - .withParameters(mapOf("smgsftk", "datalppnmrftnf", "gk", "datap", "zeemsfpmoiykts", - "datafbfn"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("nikmwnzcenk") - .withDatasetParameters("dataqlvqkkceb") - .withParameters(mapOf("shoeqpv", "datacuj", "mlkvaiolfrce", "datakpgi", "ajksmwrbw", - "datacvsreicpsv")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference() - .withReferenceName("zxyzlxo") - .withParameters(mapOf("rdfzynfmyjqrn", "datatkbperkeyhybcsx"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("gmntzeauifcz") - .withParameters( - mapOf("tztfnjpwbxannjrx", "datatfvzdobhejjb", "qwlbxny", "datathqjvoydeg", - "azfrsm", "datakppnzaluafx", "rollntvfqrjfzfhs", "datagvfmbs"))) - .withScript("fackfupyivq"))); - model = BinaryData.fromObject(model).toObject(PowerQuerySinkMapping.class); - Assertions.assertEquals("esbjohrvkpnmaa", model.queryName()); - Assertions.assertEquals("tah", model.dataflowSinks().get(0).name()); - Assertions.assertEquals("jgfp", model.dataflowSinks().get(0).description()); - Assertions.assertEquals("fmgudkfoybih", 
model.dataflowSinks().get(0).dataset().referenceName()); - Assertions.assertEquals("gb", model.dataflowSinks().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, - model.dataflowSinks().get(0).flowlet().type()); - Assertions.assertEquals("pichsbzgw", model.dataflowSinks().get(0).flowlet().referenceName()); - Assertions.assertEquals("mbuzkay", model.dataflowSinks().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("lnosql", model.dataflowSinks().get(0).rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("aiegrxooqq", model.dataflowSinks().get(0).script()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java deleted file mode 100644 index ef47a9b5ebb1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySinkTests.java +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PowerQuerySink; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PowerQuerySinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PowerQuerySink model = BinaryData.fromString( - "{\"script\":\"gpterdiu\",\"schemaLinkedService\":{\"referenceName\":\"i\",\"parameters\":{\"go\":\"datakwztsdetj\"}},\"rejectedDataLinkedService\":{\"referenceName\":\"fcqpol\",\"parameters\":{\"oxlvocptvdxxheig\":\"dataysdgzyyb\",\"ghjhjvmabzzbwa\":\"datalilw\",\"mdafbgymqt\":\"datab\"}},\"name\":\"apr\",\"description\":\"jxrjnbsc\",\"dataset\":{\"referenceName\":\"xavipneychbj\",\"parameters\":{\"xz\":\"datafsgnw\"}},\"linkedService\":{\"referenceName\":\"p\",\"parameters\":{\"rxipmlnfyzav\":\"datafb\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"ypi\",\"datasetParameters\":\"databkpdjtaqhsmq\",\"parameters\":{\"zqulptkbv\":\"datadgonjhxshthmgp\",\"blornsih\":\"datapxtzhigqqbtimpk\",\"jakx\":\"datahudsmusuaa\"},\"\":{\"ixyxvqbanosjt\":\"datafczmnn\",\"mgm\":\"datairnb\",\"p\":\"datadorgmynltw\",\"mfd\":\"datatmfoeajogsyxwet\"}}}") - .toObject(PowerQuerySink.class); - Assertions.assertEquals("apr", model.name()); - Assertions.assertEquals("jxrjnbsc", model.description()); - Assertions.assertEquals("xavipneychbj", model.dataset().referenceName()); - Assertions.assertEquals("p", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - 
Assertions.assertEquals("ypi", model.flowlet().referenceName()); - Assertions.assertEquals("i", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("fcqpol", model.rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("gpterdiu", model.script()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PowerQuerySink model - = new PowerQuerySink().withName("apr") - .withDescription("jxrjnbsc") - .withDataset( - new DatasetReference().withReferenceName("xavipneychbj").withParameters(mapOf("xz", "datafsgnw"))) - .withLinkedService( - new LinkedServiceReference().withReferenceName("p").withParameters(mapOf("rxipmlnfyzav", "datafb"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("ypi") - .withDatasetParameters("databkpdjtaqhsmq") - .withParameters(mapOf("zqulptkbv", "datadgonjhxshthmgp", "blornsih", "datapxtzhigqqbtimpk", "jakx", - "datahudsmusuaa")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("i").withParameters(mapOf("go", "datakwztsdetj"))) - .withRejectedDataLinkedService( - new LinkedServiceReference().withReferenceName("fcqpol") - .withParameters(mapOf("oxlvocptvdxxheig", "dataysdgzyyb", "ghjhjvmabzzbwa", "datalilw", - "mdafbgymqt", "datab"))) - .withScript("gpterdiu"); - model = BinaryData.fromObject(model).toObject(PowerQuerySink.class); - Assertions.assertEquals("apr", model.name()); - Assertions.assertEquals("jxrjnbsc", model.description()); - Assertions.assertEquals("xavipneychbj", model.dataset().referenceName()); - Assertions.assertEquals("p", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("ypi", model.flowlet().referenceName()); - Assertions.assertEquals("i", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("fcqpol", 
model.rejectedDataLinkedService().referenceName()); - Assertions.assertEquals("gpterdiu", model.script()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java deleted file mode 100644 index f971a7d7256f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQuerySourceTests.java +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PowerQuerySource; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PowerQuerySourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PowerQuerySource model = BinaryData.fromString( - "{\"script\":\"yp\",\"schemaLinkedService\":{\"referenceName\":\"qvmtywhlakx\",\"parameters\":{\"fxampqcrzgeuqx\":\"datapewpyj\",\"adr\":\"datapiatwfaujegqd\",\"hjkrukizyhgs\":\"datakgd\"}},\"name\":\"tnqsktx\",\"description\":\"p\",\"dataset\":{\"referenceName\":\"qggweeiwdh\",\"parameters\":{\"lhxd\":\"datacgbfzuscstun\"}},\"linkedService\":{\"referenceName\":\"klciichgjsysm\",\"parameters\":{\"bdujgcwxvecbb\":\"datadgwxfkzsifcu\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rdxrizagbbgiarks\",\"datasetParameters\":\"datapgdqxwabzrw\",\"parameters\":{\"kifmmainw\":\"dataxhaclcdosqkptjq\",\"izazz\":\"dataedxkpbqwuntob\",\"vydjufbnk\":\"datalw\",\"dabalfdxaglzfytl\":\"datablaxpegj\"},\"\":{\"xouvmrs\":\"dataqho\",\"qsdb\":\"dataflikyypzkgxfxfmy\",\"nm\":\"dataoksz\"}}}") - .toObject(PowerQuerySource.class); - Assertions.assertEquals("tnqsktx", model.name()); - Assertions.assertEquals("p", model.description()); - Assertions.assertEquals("qggweeiwdh", model.dataset().referenceName()); - Assertions.assertEquals("klciichgjsysm", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("rdxrizagbbgiarks", 
model.flowlet().referenceName()); - Assertions.assertEquals("qvmtywhlakx", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("yp", model.script()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PowerQuerySource model - = new PowerQuerySource().withName("tnqsktx") - .withDescription("p") - .withDataset(new DatasetReference().withReferenceName("qggweeiwdh") - .withParameters(mapOf("lhxd", "datacgbfzuscstun"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("klciichgjsysm") - .withParameters(mapOf("bdujgcwxvecbb", "datadgwxfkzsifcu"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rdxrizagbbgiarks") - .withDatasetParameters("datapgdqxwabzrw") - .withParameters(mapOf("kifmmainw", "dataxhaclcdosqkptjq", "izazz", "dataedxkpbqwuntob", "vydjufbnk", - "datalw", "dabalfdxaglzfytl", "datablaxpegj")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService( - new LinkedServiceReference().withReferenceName("qvmtywhlakx") - .withParameters(mapOf("fxampqcrzgeuqx", "datapewpyj", "adr", "datapiatwfaujegqd", - "hjkrukizyhgs", "datakgd"))) - .withScript("yp"); - model = BinaryData.fromObject(model).toObject(PowerQuerySource.class); - Assertions.assertEquals("tnqsktx", model.name()); - Assertions.assertEquals("p", model.description()); - Assertions.assertEquals("qggweeiwdh", model.dataset().referenceName()); - Assertions.assertEquals("klciichgjsysm", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("rdxrizagbbgiarks", model.flowlet().referenceName()); - Assertions.assertEquals("qvmtywhlakx", model.schemaLinkedService().referenceName()); - Assertions.assertEquals("yp", model.script()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java deleted file mode 100644 index 7e61ae508d7f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PowerQueryTypePropertiesTests.java +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PowerQueryTypeProperties; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PowerQuerySource; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PowerQueryTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PowerQueryTypeProperties model = BinaryData.fromString( - 
"{\"sources\":[{\"script\":\"ou\",\"schemaLinkedService\":{\"referenceName\":\"ud\",\"parameters\":{\"ncdgmoqueq\":\"datakaprhknqiijg\",\"ldxwhieproqksm\":\"datahkkyowltjouw\",\"yotnplfacqo\":\"dataxmcvprstvkitbfjt\",\"wetjtdrhutf\":\"datacqrq\"}},\"name\":\"oadtxopgehpadkmd\",\"description\":\"sszxvctkbbxuh\",\"dataset\":{\"referenceName\":\"lsi\",\"parameters\":{\"yngsuxxcz\":\"dataclabv\"}},\"linkedService\":{\"referenceName\":\"yqjoghdsa\",\"parameters\":{\"ormovdxxurntujmo\":\"dataa\",\"wemhdee\":\"datalu\",\"rhrhtsl\":\"datajslkyozdsfzjue\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vxjnxv\",\"datasetParameters\":\"databfiobpnjodfc\",\"parameters\":{\"q\":\"dataqwm\"},\"\":{\"cywnfyszza\":\"dataxsazuxejgw\",\"ozsyvrm\":\"datazsinqbdnddb\",\"eeih\":\"datajmyitrchwudl\"}}}],\"script\":\"mnoejhqlfmsib\",\"documentLocale\":\"yrfgxkyd\"}") - .toObject(PowerQueryTypeProperties.class); - Assertions.assertEquals("oadtxopgehpadkmd", model.sources().get(0).name()); - Assertions.assertEquals("sszxvctkbbxuh", model.sources().get(0).description()); - Assertions.assertEquals("lsi", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yqjoghdsa", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("vxjnxv", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("ud", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ou", model.sources().get(0).script()); - Assertions.assertEquals("mnoejhqlfmsib", model.script()); - Assertions.assertEquals("yrfgxkyd", model.documentLocale()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PowerQueryTypeProperties model - = new PowerQueryTypeProperties() - .withSources( - Arrays.asList(new PowerQuerySource().withName("oadtxopgehpadkmd") - 
.withDescription("sszxvctkbbxuh") - .withDataset(new DatasetReference().withReferenceName("lsi") - .withParameters(mapOf("yngsuxxcz", "dataclabv"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("yqjoghdsa") - .withParameters(mapOf("ormovdxxurntujmo", "dataa", "wemhdee", "datalu", "rhrhtsl", - "datajslkyozdsfzjue"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vxjnxv") - .withDatasetParameters("databfiobpnjodfc") - .withParameters(mapOf("q", "dataqwm")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("ud") - .withParameters(mapOf("ncdgmoqueq", "datakaprhknqiijg", "ldxwhieproqksm", - "datahkkyowltjouw", "yotnplfacqo", "dataxmcvprstvkitbfjt", "wetjtdrhutf", "datacqrq"))) - .withScript("ou"))) - .withScript("mnoejhqlfmsib") - .withDocumentLocale("yrfgxkyd"); - model = BinaryData.fromObject(model).toObject(PowerQueryTypeProperties.class); - Assertions.assertEquals("oadtxopgehpadkmd", model.sources().get(0).name()); - Assertions.assertEquals("sszxvctkbbxuh", model.sources().get(0).description()); - Assertions.assertEquals("lsi", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("yqjoghdsa", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("vxjnxv", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("ud", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("ou", model.sources().get(0).script()); - Assertions.assertEquals("mnoejhqlfmsib", model.script()); - Assertions.assertEquals("yrfgxkyd", model.documentLocale()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java deleted file mode 100644 index 8054a7776ed5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PrestoDatasetTypeProperties; - -public final class PrestoDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrestoDatasetTypeProperties model = BinaryData - .fromString( - "{\"tableName\":\"databcjywkdywkszavua\",\"table\":\"datanefictptplko\",\"schema\":\"datajbz\"}") - .toObject(PrestoDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrestoDatasetTypeProperties model = new PrestoDatasetTypeProperties().withTableName("databcjywkdywkszavua") - .withTable("datanefictptplko") - .withSchema("datajbz"); - model = BinaryData.fromObject(model).toObject(PrestoDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java deleted file mode 100644 index 5427c3606cd3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.PrestoObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class PrestoObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrestoObjectDataset model = BinaryData.fromString( - "{\"type\":\"PrestoObject\",\"typeProperties\":{\"tableName\":\"datadhnpjnezjighdu\",\"table\":\"datatpmrzwvwetqf\",\"schema\":\"dataxvfhuqhn\"},\"description\":\"qx\",\"structure\":\"datasot\",\"schema\":\"datalmr\",\"linkedServiceName\":{\"referenceName\":\"tujydeatwxpxbxed\",\"parameters\":{\"ugfnlvvkswurxdq\":\"databoceksramqch\",\"qcnbnwivlqcwyzh\":\"datavhauimnntfkqp\",\"eirta\":\"datadqkzstzpzecdl\"}},\"parameters\":{\"au\":{\"type\":\"SecureString\",\"defaultValue\":\"dataimt\"}},\"annotations\":[\"dataudzpsjqrmlujmt\"],\"folder\":{\"name\":\"ofwuzeb\"},\"\":{\"yenfspetxeu\":\"datampjbh\",\"devzpfr\":\"datawkhdlckdoxocj\",\"vrlhfxmrhjn\":\"dataortwwyjm\"}}") - 
.toObject(PrestoObjectDataset.class); - Assertions.assertEquals("qx", model.description()); - Assertions.assertEquals("tujydeatwxpxbxed", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("au").type()); - Assertions.assertEquals("ofwuzeb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrestoObjectDataset model = new PrestoObjectDataset().withDescription("qx") - .withStructure("datasot") - .withSchema("datalmr") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tujydeatwxpxbxed") - .withParameters(mapOf("ugfnlvvkswurxdq", "databoceksramqch", "qcnbnwivlqcwyzh", "datavhauimnntfkqp", - "eirta", "datadqkzstzpzecdl"))) - .withParameters(mapOf("au", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataimt"))) - .withAnnotations(Arrays.asList("dataudzpsjqrmlujmt")) - .withFolder(new DatasetFolder().withName("ofwuzeb")) - .withTableName("datadhnpjnezjighdu") - .withTable("datatpmrzwvwetqf") - .withSchemaTypePropertiesSchema("dataxvfhuqhn"); - model = BinaryData.fromObject(model).toObject(PrestoObjectDataset.class); - Assertions.assertEquals("qx", model.description()); - Assertions.assertEquals("tujydeatwxpxbxed", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("au").type()); - Assertions.assertEquals("ofwuzeb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java deleted file mode 100644 index d74cccb31801..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrestoSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrestoSource; - -public final class PrestoSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrestoSource model = BinaryData.fromString( - "{\"type\":\"PrestoSource\",\"query\":\"datawmgqcfggjequ\",\"queryTimeout\":\"datampgfspwhfhdguu\",\"additionalColumns\":\"dataavvzvdfytqz\",\"sourceRetryCount\":\"datam\",\"sourceRetryWait\":\"datawpwrfet\",\"maxConcurrentConnections\":\"datag\",\"disableMetricsCollection\":\"dataeufhkoernrjm\",\"\":{\"qjvdde\":\"dataasaent\"}}") - .toObject(PrestoSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrestoSource model = new PrestoSource().withSourceRetryCount("datam") - .withSourceRetryWait("datawpwrfet") - .withMaxConcurrentConnections("datag") - .withDisableMetricsCollection("dataeufhkoernrjm") - .withQueryTimeout("datampgfspwhfhdguu") - .withAdditionalColumns("dataavvzvdfytqz") - .withQuery("datawmgqcfggjequ"); - model = 
BinaryData.fromObject(model).toObject(PrestoSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java deleted file mode 100644 index 9d28a6c0ebce..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndPointConnectionsListByFactoryMockTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PrivateEndPointConnectionsListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = 
"{\"value\":[{\"properties\":{\"provisioningState\":\"xjqqozxtnowv\",\"privateEndpoint\":{\"id\":\"y\"},\"privateLinkServiceConnectionState\":{\"status\":\"syym\",\"description\":\"ueeokvqwmwvdm\",\"actionsRequired\":\"amqfeqsbjyg\"}},\"name\":\"ke\",\"type\":\"tqaomihrtbksd\",\"etag\":\"pxwficzzoxnl\",\"id\":\"xndsiqxzaolzko\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response = manager.privateEndPointConnections() - .listByFactory("ptqxksi", "njfhajgizzrlxn", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("xndsiqxzaolzko", response.iterator().next().id()); - Assertions.assertEquals("syym", - response.iterator().next().properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("ueeokvqwmwvdm", - response.iterator().next().properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("amqfeqsbjyg", - response.iterator().next().properties().privateLinkServiceConnectionState().actionsRequired()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java deleted file mode 100644 index e235867b3e57..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionListResponseTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PrivateEndpointConnectionResourceInner; -import com.azure.resourcemanager.datafactory.models.ArmIdWrapper; -import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionListResponse; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import com.azure.resourcemanager.datafactory.models.RemotePrivateEndpointConnection; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class PrivateEndpointConnectionListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateEndpointConnectionListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"provisioningState\":\"htomflrytswfp\",\"privateEndpoint\":{\"id\":\"gycxnmskwhqjjys\"},\"privateLinkServiceConnectionState\":{\"status\":\"lpshhkvpedwqslsr\",\"description\":\"pq\",\"actionsRequired\":\"wskondcbrwimuvqe\"}},\"name\":\"so\",\"type\":\"rrleaesinuqt\",\"etag\":\"qobbpihehcec\",\"id\":\"mrqbrjbbmpxdlv\"}],\"nextLink\":\"frexcrseqw\"}") - .toObject(PrivateEndpointConnectionListResponse.class); - Assertions.assertEquals("mrqbrjbbmpxdlv", model.value().get(0).id()); - Assertions.assertEquals("lpshhkvpedwqslsr", - model.value().get(0).properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("pq", - model.value().get(0).properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("wskondcbrwimuvqe", - model.value().get(0).properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("frexcrseqw", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
PrivateEndpointConnectionListResponse model = new PrivateEndpointConnectionListResponse() - .withValue(Arrays.asList(new PrivateEndpointConnectionResourceInner().withId("mrqbrjbbmpxdlv") - .withProperties(new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState( - new PrivateLinkConnectionState().withStatus("lpshhkvpedwqslsr") - .withDescription("pq") - .withActionsRequired("wskondcbrwimuvqe"))))) - .withNextLink("frexcrseqw"); - model = BinaryData.fromObject(model).toObject(PrivateEndpointConnectionListResponse.class); - Assertions.assertEquals("mrqbrjbbmpxdlv", model.value().get(0).id()); - Assertions.assertEquals("lpshhkvpedwqslsr", - model.value().get(0).properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("pq", - model.value().get(0).properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("wskondcbrwimuvqe", - model.value().get(0).properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("frexcrseqw", model.nextLink()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index 26a91e880eef..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.PrivateEndpoint; -import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequest; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PrivateEndpointConnectionOperationsCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"provisioningState\":\"efncqfewqnzn\",\"privateEndpoint\":{\"id\":\"mkitsvkuopev\"},\"privateLinkServiceConnectionState\":{\"status\":\"bopounugxnzpqe\",\"description\":\"alvorzudysndiexb\",\"actionsRequired\":\"wjmqn\"}},\"name\":\"erggqaohax\",\"type\":\"bzzeiorengkuny\",\"etag\":\"wuuoharv\",\"id\":\"zzvjc\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PrivateEndpointConnectionResource response - = manager.privateEndpointConnectionOperations() - .define("xahzjntoqfxoapw") - .withExistingFactory("ebberydeoeyef", "nibhqieytup") - 
.withProperties( - new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("xomug") - .withDescription("rygwfcwacchix") - .withActionsRequired("frxsr")) - .withPrivateEndpoint(new PrivateEndpoint().withId("jzuonttfvj"))) - .withIfMatch("qovbrfsayiiv") - .create(); - - Assertions.assertEquals("zzvjc", response.id()); - Assertions.assertEquals("bopounugxnzpqe", response.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("alvorzudysndiexb", - response.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("wjmqn", response.properties().privateLinkServiceConnectionState().actionsRequired()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java deleted file mode 100644 index 5fcf3d63953a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PrivateEndpointConnectionOperationsDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.privateEndpointConnectionOperations() - .deleteWithResponse("mygmitguvxv", "ugnbdegvxzhob", "xu", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java deleted file mode 100644 index 95f82199d7ff..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionOperationsGetWithResponseMockTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.PrivateEndpointConnectionResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PrivateEndpointConnectionOperationsGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"provisioningState\":\"d\",\"privateEndpoint\":{\"id\":\"mftrqaewuqpolt\"},\"privateLinkServiceConnectionState\":{\"status\":\"kofvzpbddvqt\",\"description\":\"yzbrkzsaxmhns\",\"actionsRequired\":\"dwxflmkcmfidr\"}},\"name\":\"nuththzejkusz\",\"type\":\"aehtgzr\",\"etag\":\"ctrvvjamxgq\",\"id\":\"ypxxerjuoykom\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PrivateEndpointConnectionResource response = manager.privateEndpointConnectionOperations() - .getWithResponse("niragbb", "ukm", "mthioaeohw", "lcen", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("ypxxerjuoykom", response.id()); - Assertions.assertEquals("kofvzpbddvqt", response.properties().privateLinkServiceConnectionState().status()); - 
Assertions.assertEquals("yzbrkzsaxmhns", - response.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("dwxflmkcmfidr", - response.properties().privateLinkServiceConnectionState().actionsRequired()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java deleted file mode 100644 index eb21c009ddfa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointConnectionResourceInnerTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PrivateEndpointConnectionResourceInner; -import com.azure.resourcemanager.datafactory.models.ArmIdWrapper; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import com.azure.resourcemanager.datafactory.models.RemotePrivateEndpointConnection; -import org.junit.jupiter.api.Assertions; - -public final class PrivateEndpointConnectionResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateEndpointConnectionResourceInner model = BinaryData.fromString( - 
"{\"properties\":{\"provisioningState\":\"ghudg\",\"privateEndpoint\":{\"id\":\"ogjggsvoujkxibda\"},\"privateLinkServiceConnectionState\":{\"status\":\"kmdyomkxfbvfbh\",\"description\":\"i\",\"actionsRequired\":\"pwpgddei\"}},\"name\":\"wzovgk\",\"type\":\"muikjcjcaztbws\",\"etag\":\"qowxwcom\",\"id\":\"kytwvcz\"}") - .toObject(PrivateEndpointConnectionResourceInner.class); - Assertions.assertEquals("kytwvcz", model.id()); - Assertions.assertEquals("kmdyomkxfbvfbh", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("i", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("pwpgddei", model.properties().privateLinkServiceConnectionState().actionsRequired()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateEndpointConnectionResourceInner model = new PrivateEndpointConnectionResourceInner().withId("kytwvcz") - .withProperties(new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("kmdyomkxfbvfbh") - .withDescription("i") - .withActionsRequired("pwpgddei"))); - model = BinaryData.fromObject(model).toObject(PrivateEndpointConnectionResourceInner.class); - Assertions.assertEquals("kytwvcz", model.id()); - Assertions.assertEquals("kmdyomkxfbvfbh", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("i", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("pwpgddei", model.properties().privateLinkServiceConnectionState().actionsRequired()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java deleted file mode 
100644 index 8ac117dd5ce1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateEndpointTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateEndpoint; -import org.junit.jupiter.api.Assertions; - -public final class PrivateEndpointTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateEndpoint model = BinaryData.fromString("{\"id\":\"pphkixkykxds\"}").toObject(PrivateEndpoint.class); - Assertions.assertEquals("pphkixkykxds", model.id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateEndpoint model = new PrivateEndpoint().withId("pphkixkykxds"); - model = BinaryData.fromObject(model).toObject(PrivateEndpoint.class); - Assertions.assertEquals("pphkixkykxds", model.id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java deleted file mode 100644 index fa2dd6f60aa2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestResourceTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateEndpoint; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequest; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequestResource; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import org.junit.jupiter.api.Assertions; - -public final class PrivateLinkConnectionApprovalRequestResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkConnectionApprovalRequestResource model = BinaryData.fromString( - "{\"properties\":{\"privateLinkServiceConnectionState\":{\"status\":\"xzujksrlsm\",\"description\":\"sqplpvmjcd\",\"actionsRequired\":\"wb\"},\"privateEndpoint\":{\"id\":\"vteo\"}},\"name\":\"vgp\",\"type\":\"deugf\",\"etag\":\"zec\",\"id\":\"xw\"}") - .toObject(PrivateLinkConnectionApprovalRequestResource.class); - Assertions.assertEquals("xw", model.id()); - Assertions.assertEquals("xzujksrlsm", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("sqplpvmjcd", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("wb", model.properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("vteo", model.properties().privateEndpoint().id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkConnectionApprovalRequestResource model - = new PrivateLinkConnectionApprovalRequestResource().withId("xw") - .withProperties(new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("xzujksrlsm") - .withDescription("sqplpvmjcd") - .withActionsRequired("wb")) - .withPrivateEndpoint(new PrivateEndpoint().withId("vteo"))); - model = 
BinaryData.fromObject(model).toObject(PrivateLinkConnectionApprovalRequestResource.class); - Assertions.assertEquals("xw", model.id()); - Assertions.assertEquals("xzujksrlsm", model.properties().privateLinkServiceConnectionState().status()); - Assertions.assertEquals("sqplpvmjcd", model.properties().privateLinkServiceConnectionState().description()); - Assertions.assertEquals("wb", model.properties().privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("vteo", model.properties().privateEndpoint().id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java deleted file mode 100644 index 5ce86a218100..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionApprovalRequestTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateEndpoint; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprovalRequest; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import org.junit.jupiter.api.Assertions; - -public final class PrivateLinkConnectionApprovalRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkConnectionApprovalRequest model = BinaryData.fromString( - "{\"privateLinkServiceConnectionState\":{\"status\":\"ykhv\",\"description\":\"xepmrut\",\"actionsRequired\":\"abaobnslujdjltym\"},\"privateEndpoint\":{\"id\":\"guihywar\"}}") - .toObject(PrivateLinkConnectionApprovalRequest.class); - Assertions.assertEquals("ykhv", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("xepmrut", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("abaobnslujdjltym", model.privateLinkServiceConnectionState().actionsRequired()); - Assertions.assertEquals("guihywar", model.privateEndpoint().id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkConnectionApprovalRequest model = new PrivateLinkConnectionApprovalRequest() - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("ykhv") - .withDescription("xepmrut") - .withActionsRequired("abaobnslujdjltym")) - .withPrivateEndpoint(new PrivateEndpoint().withId("guihywar")); - model = BinaryData.fromObject(model).toObject(PrivateLinkConnectionApprovalRequest.class); - Assertions.assertEquals("ykhv", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("xepmrut", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("abaobnslujdjltym", model.privateLinkServiceConnectionState().actionsRequired()); - 
Assertions.assertEquals("guihywar", model.privateEndpoint().id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java deleted file mode 100644 index d4677ff3808b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkConnectionStateTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import org.junit.jupiter.api.Assertions; - -public final class PrivateLinkConnectionStateTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkConnectionState model = BinaryData - .fromString("{\"status\":\"gguxhemlwyw\",\"description\":\"eczgfb\",\"actionsRequired\":\"klelssxb\"}") - .toObject(PrivateLinkConnectionState.class); - Assertions.assertEquals("gguxhemlwyw", model.status()); - Assertions.assertEquals("eczgfb", model.description()); - Assertions.assertEquals("klelssxb", model.actionsRequired()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkConnectionState model = new PrivateLinkConnectionState().withStatus("gguxhemlwyw") - .withDescription("eczgfb") - .withActionsRequired("klelssxb"); - model = BinaryData.fromObject(model).toObject(PrivateLinkConnectionState.class); - Assertions.assertEquals("gguxhemlwyw", model.status()); - Assertions.assertEquals("eczgfb", model.description()); - 
Assertions.assertEquals("klelssxb", model.actionsRequired()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java deleted file mode 100644 index 8e84a20031b2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResourceProperties; - -public final class PrivateLinkResourcePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkResourceProperties model = BinaryData - .fromString( - "{\"groupId\":\"iwzcxmjpbyeph\",\"requiredMembers\":[\"vljvrc\"],\"requiredZoneNames\":[\"qipgx\"]}") - .toObject(PrivateLinkResourceProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkResourceProperties model = new PrivateLinkResourceProperties(); - model = BinaryData.fromObject(model).toObject(PrivateLinkResourceProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java deleted file mode 100644 index 1fe83f082cf5..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResource; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResourceProperties; -import org.junit.jupiter.api.Assertions; - -public final class PrivateLinkResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkResource model = BinaryData.fromString( - "{\"properties\":{\"groupId\":\"qlkccuzgygqwaho\",\"requiredMembers\":[\"wgniipr\",\"lvawuwzdufypivls\",\"bjpmcubk\",\"ifoxxkubvphav\"],\"requiredZoneNames\":[\"brbqgvgovp\"]},\"name\":\"ttefjoknssq\",\"type\":\"qedikdfrd\",\"etag\":\"qmrjg\",\"id\":\"hfqlgg\"}") - .toObject(PrivateLinkResource.class); - Assertions.assertEquals("hfqlgg", model.id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkResource model - = new PrivateLinkResource().withId("hfqlgg").withProperties(new PrivateLinkResourceProperties()); - model = BinaryData.fromObject(model).toObject(PrivateLinkResource.class); - Assertions.assertEquals("hfqlgg", model.id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java deleted file mode 100644 index c6147f2ed5b2..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesGetWithResponseMockTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResourcesWrapper; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class PrivateLinkResourcesGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"groupId\":\"pp\",\"requiredMembers\":[\"svioxgqxgidjiijp\"],\"requiredZoneNames\":[\"knb\",\"z\"]},\"name\":\"aecujlaecwnnmnfw\",\"type\":\"lvkrnsodohpid\",\"etag\":\"yf\",\"id\":\"bhepxqtkynlizn\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PrivateLinkResourcesWrapper response = manager.privateLinkResources() - .getWithResponse("cduwuintiib", "ounsydjcem", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("bhepxqtkynlizn", 
response.value().get(0).id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java deleted file mode 100644 index f4abd3945fe4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PrivateLinkResourcesWrapperInnerTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.PrivateLinkResourcesWrapperInner; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResource; -import com.azure.resourcemanager.datafactory.models.PrivateLinkResourceProperties; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class PrivateLinkResourcesWrapperInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PrivateLinkResourcesWrapperInner model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"groupId\":\"mmuc\",\"requiredMembers\":[\"i\",\"kflrmymy\",\"nc\"],\"requiredZoneNames\":[\"riswslmiiio\"]},\"name\":\"qcgxuug\",\"type\":\"ctotiowlx\",\"etag\":\"qd\",\"id\":\"jgwdtgukranbl\"}]}") - .toObject(PrivateLinkResourcesWrapperInner.class); - Assertions.assertEquals("jgwdtgukranbl", model.value().get(0).id()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PrivateLinkResourcesWrapperInner model = new PrivateLinkResourcesWrapperInner().withValue(Arrays.asList( - new 
PrivateLinkResource().withId("jgwdtgukranbl").withProperties(new PrivateLinkResourceProperties()))); - model = BinaryData.fromObject(model).toObject(PrivateLinkResourcesWrapperInner.class); - Assertions.assertEquals("jgwdtgukranbl", model.value().get(0).id()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java deleted file mode 100644 index 7b31f7d8a302..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/PurviewConfigurationTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PurviewConfiguration; -import org.junit.jupiter.api.Assertions; - -public final class PurviewConfigurationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - PurviewConfiguration model - = BinaryData.fromString("{\"purviewResourceId\":\"stkiiuxhqyud\"}").toObject(PurviewConfiguration.class); - Assertions.assertEquals("stkiiuxhqyud", model.purviewResourceId()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - PurviewConfiguration model = new PurviewConfiguration().withPurviewResourceId("stkiiuxhqyud"); - model = BinaryData.fromObject(model).toObject(PurviewConfiguration.class); - Assertions.assertEquals("stkiiuxhqyud", model.purviewResourceId()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java deleted file mode 100644 index 215fa5d686e8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QueryDataFlowDebugSessionsResponseTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.DataFlowDebugSessionInfoInner; -import com.azure.resourcemanager.datafactory.models.QueryDataFlowDebugSessionsResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class QueryDataFlowDebugSessionsResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - QueryDataFlowDebugSessionsResponse model = BinaryData.fromString( - "{\"value\":[{\"dataFlowName\":\"noda\",\"computeType\":\"pqhe\",\"coreCount\":1635095275,\"nodeCount\":1244000330,\"integrationRuntimeName\":\"gsbos\",\"sessionId\":\"eln\",\"startTime\":\"atutmzlbiojlvfhr\",\"timeToLiveInMinutes\":438246123,\"lastActivityTime\":\"eqvcwwyyurmoch\",\"\":{\"lbkpb\":\"dataprsnmokayzejn\",\"hahzvechndbnwi\":\"datapcpil\"}}],\"nextLink\":\"olewjwi\"}") - .toObject(QueryDataFlowDebugSessionsResponse.class); - Assertions.assertEquals("noda", model.value().get(0).dataFlowName()); - Assertions.assertEquals("pqhe", model.value().get(0).computeType()); - Assertions.assertEquals(1635095275, model.value().get(0).coreCount()); 
- Assertions.assertEquals(1244000330, model.value().get(0).nodeCount()); - Assertions.assertEquals("gsbos", model.value().get(0).integrationRuntimeName()); - Assertions.assertEquals("eln", model.value().get(0).sessionId()); - Assertions.assertEquals("atutmzlbiojlvfhr", model.value().get(0).startTime()); - Assertions.assertEquals(438246123, model.value().get(0).timeToLiveInMinutes()); - Assertions.assertEquals("eqvcwwyyurmoch", model.value().get(0).lastActivityTime()); - Assertions.assertEquals("olewjwi", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - QueryDataFlowDebugSessionsResponse model = new QueryDataFlowDebugSessionsResponse() - .withValue(Arrays.asList(new DataFlowDebugSessionInfoInner().withDataFlowName("noda") - .withComputeType("pqhe") - .withCoreCount(1635095275) - .withNodeCount(1244000330) - .withIntegrationRuntimeName("gsbos") - .withSessionId("eln") - .withStartTime("atutmzlbiojlvfhr") - .withTimeToLiveInMinutes(438246123) - .withLastActivityTime("eqvcwwyyurmoch") - .withAdditionalProperties(mapOf()))) - .withNextLink("olewjwi"); - model = BinaryData.fromObject(model).toObject(QueryDataFlowDebugSessionsResponse.class); - Assertions.assertEquals("noda", model.value().get(0).dataFlowName()); - Assertions.assertEquals("pqhe", model.value().get(0).computeType()); - Assertions.assertEquals(1635095275, model.value().get(0).coreCount()); - Assertions.assertEquals(1244000330, model.value().get(0).nodeCount()); - Assertions.assertEquals("gsbos", model.value().get(0).integrationRuntimeName()); - Assertions.assertEquals("eln", model.value().get(0).sessionId()); - Assertions.assertEquals("atutmzlbiojlvfhr", model.value().get(0).startTime()); - Assertions.assertEquals(438246123, model.value().get(0).timeToLiveInMinutes()); - Assertions.assertEquals("eqvcwwyyurmoch", model.value().get(0).lastActivityTime()); - Assertions.assertEquals("olewjwi", model.nextLink()); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java deleted file mode 100644 index 5bad2e9b5903..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksObjectDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.QuickBooksObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class QuickBooksObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - QuickBooksObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"QuickBooksObject\",\"typeProperties\":{\"tableName\":\"datazw\"},\"description\":\"zuh\",\"structure\":\"datatiaczhfjdccjny\",\"schema\":\"databt\",\"linkedServiceName\":{\"referenceName\":\"uhjcgj\",\"parameters\":{\"r\":\"datantomnlzthcdbszsb\",\"dct\":\"dataxeyvidcowlrm\"}},\"parameters\":{\"ryzgavpln\":{\"type\":\"Object\",\"defaultValue\":\"datajoezvw\"}},\"annotations\":[\"datafiekkiskyyy\",\"dataek\",\"datafffyshdawjlmlcuf\"],\"folder\":{\"name\":\"iutfofhoajjylsy\"},\"\":{\"ceevogir\":\"datanufzvlqquyh\",\"mds\":\"datapwnqtvuxeu\",\"axxfdlypkcpw\":\"dataij\"}}") - .toObject(QuickBooksObjectDataset.class); - Assertions.assertEquals("zuh", model.description()); - Assertions.assertEquals("uhjcgj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ryzgavpln").type()); - Assertions.assertEquals("iutfofhoajjylsy", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - QuickBooksObjectDataset model = new QuickBooksObjectDataset().withDescription("zuh") - .withStructure("datatiaczhfjdccjny") - .withSchema("databt") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("uhjcgj") - .withParameters(mapOf("r", "datantomnlzthcdbszsb", "dct", "dataxeyvidcowlrm"))) - .withParameters(mapOf("ryzgavpln", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datajoezvw"))) - .withAnnotations(Arrays.asList("datafiekkiskyyy", "dataek", "datafffyshdawjlmlcuf")) - .withFolder(new DatasetFolder().withName("iutfofhoajjylsy")) - .withTableName("datazw"); - model = BinaryData.fromObject(model).toObject(QuickBooksObjectDataset.class); - Assertions.assertEquals("zuh", model.description()); - Assertions.assertEquals("uhjcgj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("ryzgavpln").type()); - Assertions.assertEquals("iutfofhoajjylsy", 
model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java deleted file mode 100644 index 28024bfe27d7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/QuickBooksSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.QuickBooksSource; - -public final class QuickBooksSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - QuickBooksSource model = BinaryData.fromString( - "{\"type\":\"QuickBooksSource\",\"query\":\"datavrjhtpxydiuviup\",\"queryTimeout\":\"datatnsyrrybdyqiv\",\"additionalColumns\":\"datasuhozihd\",\"sourceRetryCount\":\"datadjwth\",\"sourceRetryWait\":\"datapijgasnafd\",\"maxConcurrentConnections\":\"datanwgirnjgso\",\"disableMetricsCollection\":\"databdhrcepanhygca\",\"\":{\"svjzbggsnanojt\":\"databjjl\",\"fron\":\"datahehzxzaz\"}}") - .toObject(QuickBooksSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - QuickBooksSource model = new QuickBooksSource().withSourceRetryCount("datadjwth") - .withSourceRetryWait("datapijgasnafd") - .withMaxConcurrentConnections("datanwgirnjgso") - .withDisableMetricsCollection("databdhrcepanhygca") - .withQueryTimeout("datatnsyrrybdyqiv") - .withAdditionalColumns("datasuhozihd") - .withQuery("datavrjhtpxydiuviup"); - model = BinaryData.fromObject(model).toObject(QuickBooksSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java deleted file mode 100644 index 0ad09ba10989..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleOccurrenceTests.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DayOfWeek; -import com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class RecurrenceScheduleOccurrenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RecurrenceScheduleOccurrence model = BinaryData.fromString( - "{\"day\":\"Tuesday\",\"occurrence\":70706554,\"\":{\"axkivryvbcxnnir\":\"dataxobdqobngjbeih\",\"svnotbe\":\"datafuvesmepqrkjyp\",\"tex\":\"datafshfmw\",\"ndtjcyvmsd\":\"datag\"}}") - .toObject(RecurrenceScheduleOccurrence.class); - Assertions.assertEquals(DayOfWeek.TUESDAY, model.day()); - Assertions.assertEquals(70706554, model.occurrence()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RecurrenceScheduleOccurrence model = new RecurrenceScheduleOccurrence().withDay(DayOfWeek.TUESDAY) - .withOccurrence(70706554) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(RecurrenceScheduleOccurrence.class); - Assertions.assertEquals(DayOfWeek.TUESDAY, model.day()); - Assertions.assertEquals(70706554, model.occurrence()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java deleted file mode 100644 index 6c378312acc3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RecurrenceScheduleTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DayOfWeek; -import com.azure.resourcemanager.datafactory.models.DaysOfWeek; -import com.azure.resourcemanager.datafactory.models.RecurrenceSchedule; -import com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class RecurrenceScheduleTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RecurrenceSchedule model = BinaryData.fromString( - 
"{\"minutes\":[1084855348,1703489869,2077722094],\"hours\":[479973707],\"weekDays\":[\"Thursday\",\"Monday\",\"Monday\"],\"monthDays\":[1682107136,85333768],\"monthlyOccurrences\":[{\"day\":\"Friday\",\"occurrence\":257877485,\"\":{\"wjrpl\":\"dataiihabojargetncfl\",\"naeefzlwohobaac\":\"datakcqe\",\"noeiqhbr\":\"dataglvixfl\",\"kpbrr\":\"datacgmyjmcw\"}}],\"\":{\"rsandmusud\":\"datavinkkteblrn\",\"mikfdifbeottvio\":\"datajoshmmzotcpf\",\"va\":\"datanejpjzqbdutvnlo\"}}") - .toObject(RecurrenceSchedule.class); - Assertions.assertEquals(1084855348, model.minutes().get(0)); - Assertions.assertEquals(479973707, model.hours().get(0)); - Assertions.assertEquals(DaysOfWeek.THURSDAY, model.weekDays().get(0)); - Assertions.assertEquals(1682107136, model.monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.FRIDAY, model.monthlyOccurrences().get(0).day()); - Assertions.assertEquals(257877485, model.monthlyOccurrences().get(0).occurrence()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RecurrenceSchedule model - = new RecurrenceSchedule().withMinutes(Arrays.asList(1084855348, 1703489869, 2077722094)) - .withHours(Arrays.asList(479973707)) - .withWeekDays(Arrays.asList(DaysOfWeek.THURSDAY, DaysOfWeek.MONDAY, DaysOfWeek.MONDAY)) - .withMonthDays(Arrays.asList(1682107136, 85333768)) - .withMonthlyOccurrences(Arrays.asList(new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY) - .withOccurrence(257877485) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(RecurrenceSchedule.class); - Assertions.assertEquals(1084855348, model.minutes().get(0)); - Assertions.assertEquals(479973707, model.hours().get(0)); - Assertions.assertEquals(DaysOfWeek.THURSDAY, model.weekDays().get(0)); - Assertions.assertEquals(1682107136, model.monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.FRIDAY, model.monthlyOccurrences().get(0).day()); - 
Assertions.assertEquals(257877485, model.monthlyOccurrences().get(0).occurrence()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java deleted file mode 100644 index 1ba0ef727eb6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedirectIncompatibleRowSettingsTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RedirectIncompatibleRowSettings; -import java.util.HashMap; -import java.util.Map; - -public final class RedirectIncompatibleRowSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RedirectIncompatibleRowSettings model = BinaryData.fromString( - "{\"linkedServiceName\":\"datakgbkzqbo\",\"path\":\"datahdyasklmyfh\",\"\":{\"f\":\"datawedetawljat\",\"jtszqexcqcwbxx\":\"datadqspdutt\",\"lvkdwwqhhlfvmwu\":\"datacvekqjdru\",\"fbdanfexlawkeq\":\"dataarswsvtzotmwxq\"}}") - .toObject(RedirectIncompatibleRowSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RedirectIncompatibleRowSettings model - = new RedirectIncompatibleRowSettings().withLinkedServiceName("datakgbkzqbo") - .withPath("datahdyasklmyfh") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(RedirectIncompatibleRowSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java deleted file mode 100644 index ce6460da00c1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RedshiftUnloadSettingsTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.RedshiftUnloadSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class RedshiftUnloadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RedshiftUnloadSettings model = BinaryData.fromString( - "{\"s3LinkedServiceName\":{\"referenceName\":\"jw\",\"parameters\":{\"jwdw\":\"datanmavf\",\"e\":\"datanx\"}},\"bucketName\":\"datand\"}") - .toObject(RedshiftUnloadSettings.class); - Assertions.assertEquals("jw", model.s3LinkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RedshiftUnloadSettings model - = new RedshiftUnloadSettings().withS3LinkedServiceName(new LinkedServiceReference().withReferenceName("jw") - .withParameters(mapOf("jwdw", "datanmavf", "e", "datanx"))).withBucketName("datand"); - model = BinaryData.fromObject(model).toObject(RedshiftUnloadSettings.class); - Assertions.assertEquals("jw", model.s3LinkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java deleted file mode 100644 index 33779bcfdd19..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RelationalSource; - -public final class RelationalSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RelationalSource model = BinaryData.fromString( - "{\"type\":\"RelationalSource\",\"query\":\"dataqvvtjwdlduvim\",\"additionalColumns\":\"dataceormxoxtapaf\",\"sourceRetryCount\":\"datavbkjtgzkcptav\",\"sourceRetryWait\":\"datapydnujgblski\",\"maxConcurrentConnections\":\"datarvpuacajxdr\",\"disableMetricsCollection\":\"datapuxpzslm\",\"\":{\"fge\":\"datapzrycchqz\",\"ch\":\"datadzgszjhekbmd\",\"vwysbme\":\"dataojsrhgpitye\",\"bznl\":\"dataf\"}}") - .toObject(RelationalSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RelationalSource model = new RelationalSource().withSourceRetryCount("datavbkjtgzkcptav") - .withSourceRetryWait("datapydnujgblski") - .withMaxConcurrentConnections("datarvpuacajxdr") - .withDisableMetricsCollection("datapuxpzslm") - 
.withQuery("dataqvvtjwdlduvim") - .withAdditionalColumns("dataceormxoxtapaf"); - model = BinaryData.fromObject(model).toObject(RelationalSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java deleted file mode 100644 index cce30ef6f588..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.RelationalTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class RelationalTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RelationalTableDataset model = BinaryData.fromString( - 
"{\"type\":\"RelationalTable\",\"typeProperties\":{\"tableName\":\"datacytjgoea\"},\"description\":\"krwfmihwpadhedb\",\"structure\":\"databdczvothmkhjao\",\"schema\":\"databwfcn\",\"linkedServiceName\":{\"referenceName\":\"hbpoelhscmyhrhj\",\"parameters\":{\"pqwojoev\":\"datafqbokndwp\"}},\"parameters\":{\"krbuoggtdltlcuha\":{\"type\":\"Int\",\"defaultValue\":\"datatdxmlynzlyvap\"},\"ylzeohlpsftq\":{\"type\":\"Bool\",\"defaultValue\":\"datajv\"},\"vbvvcpwtqsu\":{\"type\":\"Int\",\"defaultValue\":\"datam\"}},\"annotations\":[\"datahmz\",\"datadffetevr\"],\"folder\":{\"name\":\"k\"},\"\":{\"ctkhfh\":\"datacycsyo\",\"rmhnmizhvpr\":\"datasatvcs\"}}") - .toObject(RelationalTableDataset.class); - Assertions.assertEquals("krwfmihwpadhedb", model.description()); - Assertions.assertEquals("hbpoelhscmyhrhj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("krbuoggtdltlcuha").type()); - Assertions.assertEquals("k", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RelationalTableDataset model = new RelationalTableDataset().withDescription("krwfmihwpadhedb") - .withStructure("databdczvothmkhjao") - .withSchema("databwfcn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hbpoelhscmyhrhj") - .withParameters(mapOf("pqwojoev", "datafqbokndwp"))) - .withParameters(mapOf("krbuoggtdltlcuha", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datatdxmlynzlyvap"), - "ylzeohlpsftq", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datajv"), - "vbvvcpwtqsu", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datam"))) - .withAnnotations(Arrays.asList("datahmz", "datadffetevr")) - .withFolder(new DatasetFolder().withName("k")) - .withTableName("datacytjgoea"); - model = BinaryData.fromObject(model).toObject(RelationalTableDataset.class); - 
Assertions.assertEquals("krwfmihwpadhedb", model.description()); - Assertions.assertEquals("hbpoelhscmyhrhj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("krbuoggtdltlcuha").type()); - Assertions.assertEquals("k", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java deleted file mode 100644 index 2f80901008ab..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RelationalTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.RelationalTableDatasetTypeProperties; - -public final class RelationalTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RelationalTableDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"dataqwcublehhk\"}") - .toObject(RelationalTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RelationalTableDatasetTypeProperties model - = new RelationalTableDatasetTypeProperties().withTableName("dataqwcublehhk"); - model = BinaryData.fromObject(model).toObject(RelationalTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java deleted file mode 100644 index 45dd96621995..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RemotePrivateEndpointConnectionTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ArmIdWrapper; -import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState; -import com.azure.resourcemanager.datafactory.models.RemotePrivateEndpointConnection; -import org.junit.jupiter.api.Assertions; - -public final class RemotePrivateEndpointConnectionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RemotePrivateEndpointConnection model = BinaryData.fromString( - "{\"provisioningState\":\"wka\",\"privateEndpoint\":{\"id\":\"jyfdvlv\"},\"privateLinkServiceConnectionState\":{\"status\":\"rnfxtgddp\",\"description\":\"hehnmnaoya\",\"actionsRequired\":\"coeqswankltytm\"}}") - .toObject(RemotePrivateEndpointConnection.class); - Assertions.assertEquals("rnfxtgddp", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("hehnmnaoya", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("coeqswankltytm", model.privateLinkServiceConnectionState().actionsRequired()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RemotePrivateEndpointConnection model - = new RemotePrivateEndpointConnection().withPrivateEndpoint(new ArmIdWrapper()) - .withPrivateLinkServiceConnectionState(new PrivateLinkConnectionState().withStatus("rnfxtgddp") - .withDescription("hehnmnaoya") - .withActionsRequired("coeqswankltytm")); - model = BinaryData.fromObject(model).toObject(RemotePrivateEndpointConnection.class); - Assertions.assertEquals("rnfxtgddp", model.privateLinkServiceConnectionState().status()); - Assertions.assertEquals("hehnmnaoya", model.privateLinkServiceConnectionState().description()); - Assertions.assertEquals("coeqswankltytm", model.privateLinkServiceConnectionState().actionsRequired()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java deleted file mode 100644 index 8030d3d226d8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RerunTumblingWindowTrigger; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class RerunTumblingWindowTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RerunTumblingWindowTrigger model = BinaryData.fromString( - "{\"type\":\"RerunTumblingWindowTrigger\",\"typeProperties\":{\"parentTrigger\":\"datapmsyhrvifurg\",\"requestedStartTime\":\"2021-02-04T22:48:43Z\",\"requestedEndTime\":\"2021-11-14T02:33:51Z\",\"rerunConcurrency\":1565427178},\"description\":\"fvuqi\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datafghc\",\"dataiipnszrrmq\",\"datakxyawtdsn\"],\"\":{\"mdecryoffglwmkm\":\"datax\",\"lqnzxsdbfbkqi\":\"dataxusn\",\"ngrdu\":\"dataehxmztf\"}}") - .toObject(RerunTumblingWindowTrigger.class); - Assertions.assertEquals("fvuqi", model.description()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-04T22:48:43Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-14T02:33:51Z"), model.requestedEndTime()); - Assertions.assertEquals(1565427178, 
model.rerunConcurrency()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RerunTumblingWindowTrigger model = new RerunTumblingWindowTrigger().withDescription("fvuqi") - .withAnnotations(Arrays.asList("datafghc", "dataiipnszrrmq", "datakxyawtdsn")) - .withParentTrigger("datapmsyhrvifurg") - .withRequestedStartTime(OffsetDateTime.parse("2021-02-04T22:48:43Z")) - .withRequestedEndTime(OffsetDateTime.parse("2021-11-14T02:33:51Z")) - .withRerunConcurrency(1565427178); - model = BinaryData.fromObject(model).toObject(RerunTumblingWindowTrigger.class); - Assertions.assertEquals("fvuqi", model.description()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-04T22:48:43Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-14T02:33:51Z"), model.requestedEndTime()); - Assertions.assertEquals(1565427178, model.rerunConcurrency()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java deleted file mode 100644 index 6a6fe4afa589..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RerunTumblingWindowTriggerTypePropertiesTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.RerunTumblingWindowTriggerTypeProperties; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; - -public final class RerunTumblingWindowTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RerunTumblingWindowTriggerTypeProperties model = BinaryData.fromString( - "{\"parentTrigger\":\"dataozdzbhtfmgpio\",\"requestedStartTime\":\"2021-06-14T16:53:17Z\",\"requestedEndTime\":\"2021-01-15T05:57:27Z\",\"rerunConcurrency\":1333339842}") - .toObject(RerunTumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-14T16:53:17Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-15T05:57:27Z"), model.requestedEndTime()); - Assertions.assertEquals(1333339842, model.rerunConcurrency()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RerunTumblingWindowTriggerTypeProperties model - = new RerunTumblingWindowTriggerTypeProperties().withParentTrigger("dataozdzbhtfmgpio") - .withRequestedStartTime(OffsetDateTime.parse("2021-06-14T16:53:17Z")) - .withRequestedEndTime(OffsetDateTime.parse("2021-01-15T05:57:27Z")) - .withRerunConcurrency(1333339842); - model = BinaryData.fromObject(model).toObject(RerunTumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-14T16:53:17Z"), model.requestedStartTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-15T05:57:27Z"), model.requestedEndTime()); - Assertions.assertEquals(1333339842, model.rerunConcurrency()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java deleted file mode 100644 index 316c5eaf52ce..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysObjectDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.ResponsysObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ResponsysObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ResponsysObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"ResponsysObject\",\"typeProperties\":{\"tableName\":\"datapdzy\"},\"description\":\"khxfpz\",\"structure\":\"dataudqhad\",\"schema\":\"datavvlyibweuaugtxl\",\"linkedServiceName\":{\"referenceName\":\"ncoqxtvytzq\",\"parameters\":{\"zbdbrlbo\":\"datadjvzmxyrazzstjvc\",\"acbibtk\":\"dataltyo\"}},\"parameters\":{\"iiul\":{\"type\":\"Int\",\"defaultValue\":\"datapmwxdsokrlnrpeyl\"},\"xwwwvunknsgvxhx\":{\"type\":\"Int\",\"defaultValue\":\"dataiqlnh\"},\"tehqyoytrcoufkq\":{\"type\":\"Float\",\"defaultValue\":\"dataatrtcqyfjvifbmo\"}},\"annotations\":[\"dataukdfpknvkiv\"],\"folder\":{\"name\":\"ezchmeodhzjlrknc\"},\"\":{\"eqkwqphfvsfts\":\"dataxmu\",\"hdtezgfctu\":\"datatwlpxca\"}}") - .toObject(ResponsysObjectDataset.class); - Assertions.assertEquals("khxfpz", model.description()); - Assertions.assertEquals("ncoqxtvytzq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("iiul").type()); - Assertions.assertEquals("ezchmeodhzjlrknc", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ResponsysObjectDataset model = new ResponsysObjectDataset().withDescription("khxfpz") - .withStructure("dataudqhad") - .withSchema("datavvlyibweuaugtxl") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ncoqxtvytzq") - .withParameters(mapOf("zbdbrlbo", "datadjvzmxyrazzstjvc", "acbibtk", "dataltyo"))) - .withParameters(mapOf("iiul", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datapmwxdsokrlnrpeyl"), - "xwwwvunknsgvxhx", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataiqlnh"), - "tehqyoytrcoufkq", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataatrtcqyfjvifbmo"))) - .withAnnotations(Arrays.asList("dataukdfpknvkiv")) - .withFolder(new DatasetFolder().withName("ezchmeodhzjlrknc")) - .withTableName("datapdzy"); - model = 
BinaryData.fromObject(model).toObject(ResponsysObjectDataset.class); - Assertions.assertEquals("khxfpz", model.description()); - Assertions.assertEquals("ncoqxtvytzq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("iiul").type()); - Assertions.assertEquals("ezchmeodhzjlrknc", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java deleted file mode 100644 index d2e87c2b89e3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ResponsysSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ResponsysSource; - -public final class ResponsysSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ResponsysSource model = BinaryData.fromString( - "{\"type\":\"ResponsysSource\",\"query\":\"dataskdzs\",\"queryTimeout\":\"datahvzgliu\",\"additionalColumns\":\"datactgsdxjxkddxo\",\"sourceRetryCount\":\"datalprsrkennnyyvv\",\"sourceRetryWait\":\"datasad\",\"maxConcurrentConnections\":\"datamnjtfplgxcjr\",\"disableMetricsCollection\":\"datab\",\"\":{\"iyycoflj\":\"dataisfjamgnpeosu\"}}") - .toObject(ResponsysSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ResponsysSource model = new ResponsysSource().withSourceRetryCount("datalprsrkennnyyvv") - .withSourceRetryWait("datasad") - .withMaxConcurrentConnections("datamnjtfplgxcjr") - .withDisableMetricsCollection("datab") - .withQueryTimeout("datahvzgliu") - .withAdditionalColumns("datactgsdxjxkddxo") - .withQuery("dataskdzs"); - model = BinaryData.fromObject(model).toObject(ResponsysSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java deleted file mode 100644 index 2bbbf9de2b44..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.RestResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class RestResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RestResourceDataset model = BinaryData.fromString( - "{\"type\":\"RestResource\",\"typeProperties\":{\"relativeUrl\":\"datamplu\",\"requestMethod\":\"dataebrmjjnfp\",\"requestBody\":\"datatbttpkimskhnkkh\",\"additionalHeaders\":{\"el\":\"datarsmhrc\",\"ybnzbnvmsh\":\"datacpgokuthr\",\"ebxiauqsuptessj\":\"datauzzlapyixlvzcgul\"},\"paginationRules\":{\"aslkvcvwpvl\":\"datatatpvblskxgxqay\",\"bwh\":\"datajbvyezjwjkqo\",\"gqefgzjvbxqcb\":\"dataieyozvrcwfpucwnb\",\"uuciagvkdlhu\":\"dataoarx\"}},\"description\":\"klbjoafmjfe\",\"structure\":\"datalvoepknarse\",\"schema\":\"datancsqoacbuqd\",\"linkedServiceName\":{\"referenceName\":\"sapleqfg\",\"parameters\":{\"z\":\"datanvszglvyake\"}},\"parameters\":{\"brveci\":{\"type\":\"Int\",\"defaultValue\":\"datalaqcwggchxvlqg\"},\"unnep\":{\"type\":\"Array\",\"defaultValue\":\"datavphirlzbip\"},\"vwkaujttwyk\":{\"type\":\"Object\",\"defaultValue\":\"datazzkueruwcjomi\"},\"lfkvga\":{\"type\":\"Float\",\"defaultValue\":\"databwofxxdplr\"}},\"annotations\":[\"datatuxlbpxrhrfjen\",\"dataazwef\",\"dataktlhqash\",\"dataostjixyz\"],\"folder\":{\"name\":\"i\"},\"\":{\"cbpzf\":\"datadwjtacfvvtd\",\"fiwltkfysu\":\"datamcsaugbr\",\"hkl\":\"datate\",\"syyhgqokjbmsrk\":\"datawhcv\"}}") - .toObject(RestResourceDataset.class); - 
Assertions.assertEquals("klbjoafmjfe", model.description()); - Assertions.assertEquals("sapleqfg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("brveci").type()); - Assertions.assertEquals("i", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RestResourceDataset model = new RestResourceDataset().withDescription("klbjoafmjfe") - .withStructure("datalvoepknarse") - .withSchema("datancsqoacbuqd") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("sapleqfg") - .withParameters(mapOf("z", "datanvszglvyake"))) - .withParameters(mapOf("brveci", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datalaqcwggchxvlqg"), - "unnep", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavphirlzbip"), - "vwkaujttwyk", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datazzkueruwcjomi"), - "lfkvga", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("databwofxxdplr"))) - .withAnnotations(Arrays.asList("datatuxlbpxrhrfjen", "dataazwef", "dataktlhqash", "dataostjixyz")) - .withFolder(new DatasetFolder().withName("i")) - .withRelativeUrl("datamplu") - .withRequestMethod("dataebrmjjnfp") - .withRequestBody("datatbttpkimskhnkkh") - .withAdditionalHeaders( - mapOf("el", "datarsmhrc", "ybnzbnvmsh", "datacpgokuthr", "ebxiauqsuptessj", "datauzzlapyixlvzcgul")) - .withPaginationRules(mapOf("aslkvcvwpvl", "datatatpvblskxgxqay", "bwh", "datajbvyezjwjkqo", "gqefgzjvbxqcb", - "dataieyozvrcwfpucwnb", "uuciagvkdlhu", "dataoarx")); - model = BinaryData.fromObject(model).toObject(RestResourceDataset.class); - Assertions.assertEquals("klbjoafmjfe", model.description()); - Assertions.assertEquals("sapleqfg", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("brveci").type()); - 
Assertions.assertEquals("i", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java deleted file mode 100644 index de352c349598..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.RestResourceDatasetTypeProperties; -import java.util.HashMap; -import java.util.Map; - -public final class RestResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RestResourceDatasetTypeProperties model = BinaryData.fromString( - "{\"relativeUrl\":\"datayxxhwrlqom\",\"requestMethod\":\"datasyilpzzbrwnrzoz\",\"requestBody\":\"dataagysokl\",\"additionalHeaders\":{\"bvrrbnh\":\"datas\",\"bhujcydyl\":\"datal\",\"izsyq\":\"datamxvps\",\"llcbrva\":\"datag\"},\"paginationRules\":{\"jjrnogykugdlavsa\":\"datalkyhtrrqwfyybpt\",\"fkbzbfbxjblajy\":\"datagthkslgeu\",\"dj\":\"datadnbycsbtoisa\",\"zxgnywxu\":\"dataofsv\"}}") - .toObject(RestResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RestResourceDatasetTypeProperties model - = new RestResourceDatasetTypeProperties().withRelativeUrl("datayxxhwrlqom") - .withRequestMethod("datasyilpzzbrwnrzoz") - .withRequestBody("dataagysokl") - .withAdditionalHeaders( - mapOf("bvrrbnh", "datas", "bhujcydyl", "datal", "izsyq", "datamxvps", "llcbrva", "datag")) - .withPaginationRules(mapOf("jjrnogykugdlavsa", "datalkyhtrrqwfyybpt", "fkbzbfbxjblajy", "datagthkslgeu", - "dj", "datadnbycsbtoisa", "zxgnywxu", "dataofsv")); - model = BinaryData.fromObject(model).toObject(RestResourceDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java deleted file mode 100644 index b27989e37579..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSinkTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RestSink; - -public final class RestSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RestSink model = BinaryData.fromString( - "{\"type\":\"RestSink\",\"requestMethod\":\"databtwelutr\",\"additionalHeaders\":\"datazhwpxpsc\",\"httpRequestTimeout\":\"dataltslfccyavy\",\"requestInterval\":\"datamndrdqqjkegbmld\",\"httpCompressionType\":\"dataiherzkhiovhjkw\",\"writeBatchSize\":\"datalpjrepahvoir\",\"writeBatchTimeout\":\"dataxqnzss\",\"sinkRetryCount\":\"dataldtqykzmwdoqre\",\"sinkRetryWait\":\"datatrnqxi\",\"maxConcurrentConnections\":\"dataozryoxmfrxfxyc\",\"disableMetricsCollection\":\"dataalvchfumlf\",\"\":{\"qlrt\":\"datazxxkokipklfwnhfk\",\"jitbnhglrvlarozs\":\"datafswqdkv\",\"abgsdxtwqqukgo\":\"datamucr\",\"ksltunrwxsqvx\":\"datalvjgsk\"}}") - .toObject(RestSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RestSink model = new 
RestSink().withWriteBatchSize("datalpjrepahvoir") - .withWriteBatchTimeout("dataxqnzss") - .withSinkRetryCount("dataldtqykzmwdoqre") - .withSinkRetryWait("datatrnqxi") - .withMaxConcurrentConnections("dataozryoxmfrxfxyc") - .withDisableMetricsCollection("dataalvchfumlf") - .withRequestMethod("databtwelutr") - .withAdditionalHeaders("datazhwpxpsc") - .withHttpRequestTimeout("dataltslfccyavy") - .withRequestInterval("datamndrdqqjkegbmld") - .withHttpCompressionType("dataiherzkhiovhjkw"); - model = BinaryData.fromObject(model).toObject(RestSink.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java deleted file mode 100644 index bcc108adb393..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RestSourceTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RestSource; - -public final class RestSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RestSource model = BinaryData.fromString( - "{\"type\":\"RestSource\",\"requestMethod\":\"datapccfwq\",\"requestBody\":\"dataouqyzxzjehdklvqt\",\"additionalHeaders\":\"dataoc\",\"paginationRules\":\"dataetctjh\",\"httpRequestTimeout\":\"datamoazsjsuevfvnn\",\"requestInterval\":\"dataccvxqbxgq\",\"additionalColumns\":\"datawnriwxe\",\"sourceRetryCount\":\"databv\",\"sourceRetryWait\":\"dataldi\",\"maxConcurrentConnections\":\"dataxsvzwbktalobxl\",\"disableMetricsCollection\":\"datajthmibqgld\",\"\":{\"evjealx\":\"datakalpqlnn\",\"fdkkvijilfqvodz\":\"dataewlwbxuf\"}}") - .toObject(RestSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RestSource model = new RestSource().withSourceRetryCount("databv") - .withSourceRetryWait("dataldi") - .withMaxConcurrentConnections("dataxsvzwbktalobxl") - .withDisableMetricsCollection("datajthmibqgld") - .withRequestMethod("datapccfwq") - .withRequestBody("dataouqyzxzjehdklvqt") - .withAdditionalHeaders("dataoc") - .withPaginationRules("dataetctjh") - .withHttpRequestTimeout("datamoazsjsuevfvnn") - .withRequestInterval("dataccvxqbxgq") - .withAdditionalColumns("datawnriwxe"); - model = BinaryData.fromObject(model).toObject(RestSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java deleted file mode 100644 index 8f3bcaccac2a..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RetryPolicyTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RetryPolicy; -import org.junit.jupiter.api.Assertions; - -public final class RetryPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RetryPolicy model = BinaryData.fromString("{\"count\":\"datafhclssedxiig\",\"intervalInSeconds\":2011535990}") - .toObject(RetryPolicy.class); - Assertions.assertEquals(2011535990, model.intervalInSeconds()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RetryPolicy model = new RetryPolicy().withCount("datafhclssedxiig").withIntervalInSeconds(2011535990); - model = BinaryData.fromObject(model).toObject(RetryPolicy.class); - Assertions.assertEquals(2011535990, model.intervalInSeconds()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java deleted file mode 100644 index b5c8ffd61138..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryFilterTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RunQueryFilter; -import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand; -import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class RunQueryFilterTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RunQueryFilter model = BinaryData.fromString( - "{\"operand\":\"TriggerName\",\"operator\":\"Equals\",\"values\":[\"vewzcj\",\"nmwcpmgu\",\"adraufactkahzo\",\"ajjziuxxpshne\"]}") - .toObject(RunQueryFilter.class); - Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_NAME, model.operand()); - Assertions.assertEquals(RunQueryFilterOperator.EQUALS, model.operator()); - Assertions.assertEquals("vewzcj", model.values().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RunQueryFilter model = new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME) - .withOperator(RunQueryFilterOperator.EQUALS) - .withValues(Arrays.asList("vewzcj", "nmwcpmgu", "adraufactkahzo", "ajjziuxxpshne")); - model = BinaryData.fromObject(model).toObject(RunQueryFilter.class); - Assertions.assertEquals(RunQueryFilterOperand.TRIGGER_NAME, model.operand()); - Assertions.assertEquals(RunQueryFilterOperator.EQUALS, model.operator()); - Assertions.assertEquals("vewzcj", model.values().get(0)); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java deleted file mode 100644 index 21f7fa250ff9..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/RunQueryOrderByTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.RunQueryOrder; -import com.azure.resourcemanager.datafactory.models.RunQueryOrderBy; -import com.azure.resourcemanager.datafactory.models.RunQueryOrderByField; -import org.junit.jupiter.api.Assertions; - -public final class RunQueryOrderByTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RunQueryOrderBy model - = BinaryData.fromString("{\"orderBy\":\"ActivityName\",\"order\":\"ASC\"}").toObject(RunQueryOrderBy.class); - Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_NAME, model.orderBy()); - Assertions.assertEquals(RunQueryOrder.ASC, model.order()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RunQueryOrderBy model - = new RunQueryOrderBy().withOrderBy(RunQueryOrderByField.ACTIVITY_NAME).withOrder(RunQueryOrder.ASC); - model = BinaryData.fromObject(model).toObject(RunQueryOrderBy.class); - Assertions.assertEquals(RunQueryOrderByField.ACTIVITY_NAME, model.orderBy()); - Assertions.assertEquals(RunQueryOrder.ASC, model.order()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java deleted file mode 100644 index 74969ec47414..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SalesforceMarketingCloudObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceMarketingCloudObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceMarketingCloudObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceMarketingCloudObject\",\"typeProperties\":{\"tableName\":\"datadsluokcevoxd\"},\"description\":\"pwmgwxwukfjv\",\"structure\":\"datalaxseisv\",\"schema\":\"datagyphheovejkpalec\",\"linkedServiceName\":{\"referenceName\":\"tlthrt\",\"parameters\":{\"qefnquoll\":\"datavfssrg\",\"qqiehdhjofywwna\":\"datauurmuzem\"}},\"parameters\":{\"slqcxuthv\":{\"type\":\"Int\",\"defaultValue\":\"datarx\"}},\"annotations\":[\"datalyyhrgmabspmlu\",\"datayju\",\"datakedputocrb\",\"datagqicmdrgcuzjmvkr\"],\"folder\":{\"name\":\"cqhgcmljzksqimy\"},\"\":{\"qpvhszopeukufds\":\"datavfiomhc\",\"fsjbpwjwz\":\"databsskgqjemosq\"}}") - .toObject(SalesforceMarketingCloudObjectDataset.class); - Assertions.assertEquals("pwmgwxwukfjv", model.description()); - Assertions.assertEquals("tlthrt", 
model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("slqcxuthv").type()); - Assertions.assertEquals("cqhgcmljzksqimy", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceMarketingCloudObjectDataset model = new SalesforceMarketingCloudObjectDataset() - .withDescription("pwmgwxwukfjv") - .withStructure("datalaxseisv") - .withSchema("datagyphheovejkpalec") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tlthrt") - .withParameters(mapOf("qefnquoll", "datavfssrg", "qqiehdhjofywwna", "datauurmuzem"))) - .withParameters( - mapOf("slqcxuthv", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datarx"))) - .withAnnotations(Arrays.asList("datalyyhrgmabspmlu", "datayju", "datakedputocrb", "datagqicmdrgcuzjmvkr")) - .withFolder(new DatasetFolder().withName("cqhgcmljzksqimy")) - .withTableName("datadsluokcevoxd"); - model = BinaryData.fromObject(model).toObject(SalesforceMarketingCloudObjectDataset.class); - Assertions.assertEquals("pwmgwxwukfjv", model.description()); - Assertions.assertEquals("tlthrt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("slqcxuthv").type()); - Assertions.assertEquals("cqhgcmljzksqimy", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java deleted file mode 100644 index 9836ff5397b3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceMarketingCloudSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceMarketingCloudSource; - -public final class SalesforceMarketingCloudSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceMarketingCloudSource model = BinaryData.fromString( - "{\"type\":\"SalesforceMarketingCloudSource\",\"query\":\"datajp\",\"queryTimeout\":\"datalrhnlxstpgzybezm\",\"additionalColumns\":\"dataqpdchdsxvkm\",\"sourceRetryCount\":\"datapxzgj\",\"sourceRetryWait\":\"datamtskto\",\"maxConcurrentConnections\":\"datay\",\"disableMetricsCollection\":\"datad\",\"\":{\"osugrfizfw\":\"dataso\",\"jqnienctwbimh\":\"datamaenwhqafzgzmo\",\"qam\":\"datamognnwxrdll\",\"fpnbyxygubvi\":\"datajyy\"}}") - .toObject(SalesforceMarketingCloudSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceMarketingCloudSource model = new 
SalesforceMarketingCloudSource().withSourceRetryCount("datapxzgj") - .withSourceRetryWait("datamtskto") - .withMaxConcurrentConnections("datay") - .withDisableMetricsCollection("datad") - .withQueryTimeout("datalrhnlxstpgzybezm") - .withAdditionalColumns("dataqpdchdsxvkm") - .withQuery("datajp"); - model = BinaryData.fromObject(model).toObject(SalesforceMarketingCloudSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java deleted file mode 100644 index 08e274b4bd8e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SalesforceObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceObject\",\"typeProperties\":{\"objectApiName\":\"datadwxhhlbm\"},\"description\":\"hfxnrpdhewokyqs\",\"structure\":\"dataxfsyw\",\"schema\":\"datahqbtodjfyxbvkv\",\"linkedServiceName\":{\"referenceName\":\"zdmvdd\",\"parameters\":{\"lrhocr\":\"datarugyozzzawnjdv\"}},\"parameters\":{\"hxtozfgdk\":{\"type\":\"String\",\"defaultValue\":\"datandqzbvbpsuv\"},\"igf\":{\"type\":\"Int\",\"defaultValue\":\"dataurklp\"},\"waqdzqydewuwxy\":{\"type\":\"Int\",\"defaultValue\":\"dataeutuipjclz\"}},\"annotations\":[\"datazzevtzqwczochwb\",\"datak\",\"datauynf\",\"datakyvnhiysdhork\"],\"folder\":{\"name\":\"raqkiwlw\"},\"\":{\"wmwqoguflte\":\"datala\",\"egefzjx\":\"datat\",\"thnvxwtdqtcbjd\":\"datajtqbgysi\"}}") - .toObject(SalesforceObjectDataset.class); - Assertions.assertEquals("hfxnrpdhewokyqs", model.description()); - Assertions.assertEquals("zdmvdd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hxtozfgdk").type()); - Assertions.assertEquals("raqkiwlw", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceObjectDataset model = new 
SalesforceObjectDataset().withDescription("hfxnrpdhewokyqs") - .withStructure("dataxfsyw") - .withSchema("datahqbtodjfyxbvkv") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("zdmvdd") - .withParameters(mapOf("lrhocr", "datarugyozzzawnjdv"))) - .withParameters(mapOf("hxtozfgdk", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datandqzbvbpsuv"), "igf", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataurklp"), - "waqdzqydewuwxy", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataeutuipjclz"))) - .withAnnotations(Arrays.asList("datazzevtzqwczochwb", "datak", "datauynf", "datakyvnhiysdhork")) - .withFolder(new DatasetFolder().withName("raqkiwlw")) - .withObjectApiName("datadwxhhlbm"); - model = BinaryData.fromObject(model).toObject(SalesforceObjectDataset.class); - Assertions.assertEquals("hfxnrpdhewokyqs", model.description()); - Assertions.assertEquals("zdmvdd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("hxtozfgdk").type()); - Assertions.assertEquals("raqkiwlw", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java deleted file mode 100644 index 4ea82006d303..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceObjectDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SalesforceObjectDatasetTypeProperties; - -public final class SalesforceObjectDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceObjectDatasetTypeProperties model = BinaryData.fromString("{\"objectApiName\":\"dataq\"}") - .toObject(SalesforceObjectDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceObjectDatasetTypeProperties model - = new SalesforceObjectDatasetTypeProperties().withObjectApiName("dataq"); - model = BinaryData.fromObject(model).toObject(SalesforceObjectDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java deleted file mode 100644 index 3a04d5bbcd5a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceServiceCloudObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"SalesforceServiceCloudObject\",\"typeProperties\":{\"objectApiName\":\"dataxu\"},\"description\":\"aujqgbbjv\",\"structure\":\"dataoawh\",\"schema\":\"datasmbcsloygsab\",\"linkedServiceName\":{\"referenceName\":\"gdheronsd\",\"parameters\":{\"fdbqskgqjbvitp\":\"datakzvzuatqhgzuyxtr\",\"wkgjwb\":\"datapvsffavdhpiwrm\"}},\"parameters\":{\"megaj\":{\"type\":\"Object\",\"defaultValue\":\"databwxyldqtmggcpd\"},\"wqkkgeseip\":{\"type\":\"String\",\"defaultValue\":\"datatwymzs\"}},\"annotations\":[\"dataiupqscoobkj\"],\"folder\":{\"name\":\"xsqcom\"},\"\":{\"ncevxxkdevpxi\":\"datai\",\"tv\":\"dataziizmeqmdu\",\"p\":\"datadqx\"}}") - .toObject(SalesforceServiceCloudObjectDataset.class); - Assertions.assertEquals("aujqgbbjv", model.description()); - Assertions.assertEquals("gdheronsd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("megaj").type()); - Assertions.assertEquals("xsqcom", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudObjectDataset model - = new SalesforceServiceCloudObjectDataset().withDescription("aujqgbbjv") - .withStructure("dataoawh") - .withSchema("datasmbcsloygsab") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("gdheronsd") - .withParameters(mapOf("fdbqskgqjbvitp", "datakzvzuatqhgzuyxtr", "wkgjwb", "datapvsffavdhpiwrm"))) - .withParameters( - mapOf("megaj", - new ParameterSpecification().withType(ParameterType.OBJECT) - .withDefaultValue("databwxyldqtmggcpd"), - "wqkkgeseip", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatwymzs"))) - .withAnnotations(Arrays.asList("dataiupqscoobkj")) - .withFolder(new DatasetFolder().withName("xsqcom")) - .withObjectApiName("dataxu"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudObjectDataset.class); - Assertions.assertEquals("aujqgbbjv", model.description()); - 
Assertions.assertEquals("gdheronsd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("megaj").type()); - Assertions.assertEquals("xsqcom", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java deleted file mode 100644 index 2c379f468402..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudObjectDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudObjectDatasetTypeProperties; - -public final class SalesforceServiceCloudObjectDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"dataml\"}") - .toObject(SalesforceServiceCloudObjectDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudObjectDatasetTypeProperties model - = new SalesforceServiceCloudObjectDatasetTypeProperties().withObjectApiName("dataml"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudObjectDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java deleted file mode 100644 index 0cb8c542afc6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSinkTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudSink; -import com.azure.resourcemanager.datafactory.models.SalesforceSinkWriteBehavior; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceServiceCloudSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudSink model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudSink\",\"writeBehavior\":\"Insert\",\"externalIdFieldName\":\"dataks\",\"ignoreNullValues\":\"dataehrajbatgmxkolt\",\"writeBatchSize\":\"dataqjcmkpxbc\",\"writeBatchTimeout\":\"datarfkwc\",\"sinkRetryCount\":\"datamyowddhtwaxob\",\"sinkRetryWait\":\"dataatqocvrdjpvs\",\"maxConcurrentConnections\":\"datawpsteuvjd\",\"disableMetricsCollection\":\"dataocyvymvn\",\"\":{\"bfomo\":\"datahitxo\",\"rhhbvbqxtktkeuap\":\"datacyn\",\"bhptraljcq\":\"datamoofb\"}}") - .toObject(SalesforceServiceCloudSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.INSERT, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudSink model = new SalesforceServiceCloudSink().withWriteBatchSize("dataqjcmkpxbc") - .withWriteBatchTimeout("datarfkwc") - .withSinkRetryCount("datamyowddhtwaxob") - .withSinkRetryWait("dataatqocvrdjpvs") - .withMaxConcurrentConnections("datawpsteuvjd") - .withDisableMetricsCollection("dataocyvymvn") - .withWriteBehavior(SalesforceSinkWriteBehavior.INSERT) - .withExternalIdFieldName("dataks") - .withIgnoreNullValues("dataehrajbatgmxkolt"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.INSERT, model.writeBehavior()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java deleted file mode 100644 index d6ec5b1bfa1c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudSource; - -public final class SalesforceServiceCloudSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudSource model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudSource\",\"query\":\"datarjamztvnmr\",\"readBehavior\":\"datatypuotmkbofuh\",\"additionalColumns\":\"dataksgou\",\"sourceRetryCount\":\"dataegtn\",\"sourceRetryWait\":\"datanotrgyyje\",\"maxConcurrentConnections\":\"dataovjdunbaetsdufe\",\"disableMetricsCollection\":\"datavvkuzygzrzubdt\",\"\":{\"mhzpurnp\":\"datac\",\"albx\":\"datakbxkzcfios\",\"nluvcwuafbhxoa\":\"datad\",\"ogzawfoqdnxu\":\"datafpqfpkrmlbkv\"}}") - .toObject(SalesforceServiceCloudSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudSource model = new SalesforceServiceCloudSource().withSourceRetryCount("dataegtn") - .withSourceRetryWait("datanotrgyyje") - .withMaxConcurrentConnections("dataovjdunbaetsdufe") - .withDisableMetricsCollection("datavvkuzygzrzubdt") - .withQuery("datarjamztvnmr") - 
.withReadBehavior("datatypuotmkbofuh") - .withAdditionalColumns("dataksgou"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java deleted file mode 100644 index 9c3e63b7100b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2ObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceServiceCloudV2ObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudV2ObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"SalesforceServiceCloudV2Object\",\"typeProperties\":{\"objectApiName\":\"datadtejobjzrlwt\",\"reportId\":\"datacnzalgmpupjh\"},\"description\":\"ylyu\",\"structure\":\"datavb\",\"schema\":\"datazlzj\",\"linkedServiceName\":{\"referenceName\":\"bkpcutz\",\"parameters\":{\"imbdqraokdarluob\":\"dataydpoknse\",\"j\":\"datavalqwzkny\",\"dlff\":\"dataysvclfjyclvi\",\"fouzfbpg\":\"dataleirmtxfqpfildcg\"}},\"parameters\":{\"jatgngwn\":{\"type\":\"Int\",\"defaultValue\":\"datadbzdylb\"},\"dq\":{\"type\":\"Bool\",\"defaultValue\":\"datatecgprzsqm\"},\"oyxuucol\":{\"type\":\"Array\",\"defaultValue\":\"datazbyqhaath\"},\"lhrvmgsbpgmncr\":{\"type\":\"SecureString\",\"defaultValue\":\"dataruxrzh\"}},\"annotations\":[\"dataiforomppzsauq\"],\"folder\":{\"name\":\"hplfcmpuaiugoc\"},\"\":{\"iccu\":\"datalyspjym\",\"qwbbxiwtwfgo\":\"datancfunlakgixhqjqh\",\"bkyy\":\"datalalvemnnzug\"}}") - .toObject(SalesforceServiceCloudV2ObjectDataset.class); - Assertions.assertEquals("ylyu", model.description()); - Assertions.assertEquals("bkpcutz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("jatgngwn").type()); - Assertions.assertEquals("hplfcmpuaiugoc", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudV2ObjectDataset model - = new SalesforceServiceCloudV2ObjectDataset().withDescription("ylyu") - .withStructure("datavb") - .withSchema("datazlzj") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bkpcutz") - .withParameters(mapOf("imbdqraokdarluob", "dataydpoknse", "j", "datavalqwzkny", "dlff", - "dataysvclfjyclvi", "fouzfbpg", "dataleirmtxfqpfildcg"))) - .withParameters(mapOf("jatgngwn", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datadbzdylb"), "dq", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datatecgprzsqm"), - "oyxuucol", - new 
ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datazbyqhaath"), - "lhrvmgsbpgmncr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataruxrzh"))) - .withAnnotations(Arrays.asList("dataiforomppzsauq")) - .withFolder(new DatasetFolder().withName("hplfcmpuaiugoc")) - .withObjectApiName("datadtejobjzrlwt") - .withReportId("datacnzalgmpupjh"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2ObjectDataset.class); - Assertions.assertEquals("ylyu", model.description()); - Assertions.assertEquals("bkpcutz", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("jatgngwn").type()); - Assertions.assertEquals("hplfcmpuaiugoc", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java deleted file mode 100644 index 06d9442d2999..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SalesforceServiceCloudV2ObjectDatasetTypeProperties; - -public final class SalesforceServiceCloudV2ObjectDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudV2ObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"datay\",\"reportId\":\"dataohlmtsnv\"}") - .toObject(SalesforceServiceCloudV2ObjectDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudV2ObjectDatasetTypeProperties model - = new SalesforceServiceCloudV2ObjectDatasetTypeProperties().withObjectApiName("datay") - .withReportId("dataohlmtsnv"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2ObjectDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java deleted file mode 100644 index 5418a801971e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SinkTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2Sink; -import com.azure.resourcemanager.datafactory.models.SalesforceV2SinkWriteBehavior; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceServiceCloudV2SinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudV2Sink model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudV2Sink\",\"writeBehavior\":\"Insert\",\"externalIdFieldName\":\"dataclf\",\"ignoreNullValues\":\"datauet\",\"writeBatchSize\":\"datapc\",\"writeBatchTimeout\":\"datagrtgnvl\",\"sinkRetryCount\":\"datamizhdxsybn\",\"sinkRetryWait\":\"datagv\",\"maxConcurrentConnections\":\"datanplv\",\"disableMetricsCollection\":\"datakmp\",\"\":{\"zauumzwlr\":\"datalvinxwtxtetwqk\",\"plng\":\"datarvkneo\",\"yfeqajtzquhqrj\":\"datazvugqwxslisgfx\",\"izjwugr\":\"datallgrckoxkpjzyc\"}}") - .toObject(SalesforceServiceCloudV2Sink.class); - Assertions.assertEquals(SalesforceV2SinkWriteBehavior.INSERT, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudV2Sink model = new SalesforceServiceCloudV2Sink().withWriteBatchSize("datapc") - .withWriteBatchTimeout("datagrtgnvl") - .withSinkRetryCount("datamizhdxsybn") - .withSinkRetryWait("datagv") - .withMaxConcurrentConnections("datanplv") - .withDisableMetricsCollection("datakmp") - .withWriteBehavior(SalesforceV2SinkWriteBehavior.INSERT) - .withExternalIdFieldName("dataclf") - .withIgnoreNullValues("datauet"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2Sink.class); - Assertions.assertEquals(SalesforceV2SinkWriteBehavior.INSERT, model.writeBehavior()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java deleted file mode 100644 index 9810e33a5739..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceServiceCloudV2SourceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceServiceCloudV2Source; - -public final class SalesforceServiceCloudV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceServiceCloudV2Source model = BinaryData.fromString( - "{\"type\":\"SalesforceServiceCloudV2Source\",\"SOQLQuery\":\"dataxbrfqi\",\"query\":\"datawfxmdotdgvsoyp\",\"includeDeletedObjects\":\"dataoqvczdb\",\"additionalColumns\":\"datazcql\",\"sourceRetryCount\":\"datahbkapbgmjodf\",\"sourceRetryWait\":\"datahlipxkxhj\",\"maxConcurrentConnections\":\"datavsjuvjmnsgvf\",\"disableMetricsCollection\":\"dataomdplvgl\",\"\":{\"ydxmplxzrofscib\":\"datapiwpi\",\"ri\":\"datatxyjq\"}}") - .toObject(SalesforceServiceCloudV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceServiceCloudV2Source model - = new SalesforceServiceCloudV2Source().withSourceRetryCount("datahbkapbgmjodf") - .withSourceRetryWait("datahlipxkxhj") - .withMaxConcurrentConnections("datavsjuvjmnsgvf") - .withDisableMetricsCollection("dataomdplvgl") - .withSoqlQuery("dataxbrfqi") - .withQuery("datawfxmdotdgvsoyp") 
- .withIncludeDeletedObjects("dataoqvczdb") - .withAdditionalColumns("datazcql"); - model = BinaryData.fromObject(model).toObject(SalesforceServiceCloudV2Source.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java deleted file mode 100644 index c415f26a076d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSinkTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceSink; -import com.azure.resourcemanager.datafactory.models.SalesforceSinkWriteBehavior; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceSink model = BinaryData.fromString( - "{\"type\":\"SalesforceSink\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datav\",\"ignoreNullValues\":\"datantjsmjxgqs\",\"writeBatchSize\":\"datacvaa\",\"writeBatchTimeout\":\"datavbzcqgtz\",\"sinkRetryCount\":\"datalrmrtdznvjgovy\",\"sinkRetryWait\":\"datappswlept\",\"maxConcurrentConnections\":\"databrkntfwxkeuyxgpc\",\"disableMetricsCollection\":\"datavmrdlckpznov\",\"\":{\"kzysdhars\":\"datawpaiq\",\"lthnn\":\"datahqmrpdxnrdvtvty\"}}") - .toObject(SalesforceSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
SalesforceSink model = new SalesforceSink().withWriteBatchSize("datacvaa") - .withWriteBatchTimeout("datavbzcqgtz") - .withSinkRetryCount("datalrmrtdznvjgovy") - .withSinkRetryWait("datappswlept") - .withMaxConcurrentConnections("databrkntfwxkeuyxgpc") - .withDisableMetricsCollection("datavmrdlckpznov") - .withWriteBehavior(SalesforceSinkWriteBehavior.UPSERT) - .withExternalIdFieldName("datav") - .withIgnoreNullValues("datantjsmjxgqs"); - model = BinaryData.fromObject(model).toObject(SalesforceSink.class); - Assertions.assertEquals(SalesforceSinkWriteBehavior.UPSERT, model.writeBehavior()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java deleted file mode 100644 index b94746e46541..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceSource; - -public final class SalesforceSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceSource model = BinaryData.fromString( - "{\"type\":\"SalesforceSource\",\"query\":\"dataowlammvazvwzi\",\"readBehavior\":\"dataijunmgdpxeivr\",\"queryTimeout\":\"dataxdnk\",\"additionalColumns\":\"datatfgcuzvbrehdtqg\",\"sourceRetryCount\":\"dataahngnrseiid\",\"sourceRetryWait\":\"datawbybmxfhzzgolfe\",\"maxConcurrentConnections\":\"dataibqilbpyj\",\"disableMetricsCollection\":\"datayeml\",\"\":{\"zvv\":\"datavz\",\"bheyxewcsk\":\"datarougalywgqrevb\",\"fvvxi\":\"datavkwbpr\"}}") - .toObject(SalesforceSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceSource model = new SalesforceSource().withSourceRetryCount("dataahngnrseiid") - .withSourceRetryWait("datawbybmxfhzzgolfe") - .withMaxConcurrentConnections("dataibqilbpyj") - .withDisableMetricsCollection("datayeml") - .withQueryTimeout("dataxdnk") - .withAdditionalColumns("datatfgcuzvbrehdtqg") - .withQuery("dataowlammvazvwzi") - .withReadBehavior("dataijunmgdpxeivr"); - model = BinaryData.fromObject(model).toObject(SalesforceSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java deleted file mode 100644 index 46cc919b74be..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SalesforceV2ObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceV2ObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceV2ObjectDataset model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Object\",\"typeProperties\":{\"objectApiName\":\"datahoxfzz\",\"reportId\":\"datamupbus\"},\"description\":\"ugozwplxzgzumnot\",\"structure\":\"dataxkkbygbgiqkwys\",\"schema\":\"databbnhtt\",\"linkedServiceName\":{\"referenceName\":\"izonzsur\",\"parameters\":{\"oxyipdthjfvnh\":\"datajasfzhzzcarci\"}},\"parameters\":{\"ssjgbfbb\":{\"type\":\"Bool\",\"defaultValue\":\"dataputfelfchnu\"}},\"annotations\":[\"dataxxczzunfnbp\"],\"folder\":{\"name\":\"eivkb\"},\"\":{\"nuqhqp\":\"dataepgh\",\"ttfsclgg\":\"datat\",\"bfytnhdnihuzzjuz\":\"datagygn\"}}") - .toObject(SalesforceV2ObjectDataset.class); - Assertions.assertEquals("ugozwplxzgzumnot", model.description()); - Assertions.assertEquals("izonzsur", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ssjgbfbb").type()); - Assertions.assertEquals("eivkb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceV2ObjectDataset model = new SalesforceV2ObjectDataset().withDescription("ugozwplxzgzumnot") - 
.withStructure("dataxkkbygbgiqkwys") - .withSchema("databbnhtt") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("izonzsur") - .withParameters(mapOf("oxyipdthjfvnh", "datajasfzhzzcarci"))) - .withParameters(mapOf("ssjgbfbb", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataputfelfchnu"))) - .withAnnotations(Arrays.asList("dataxxczzunfnbp")) - .withFolder(new DatasetFolder().withName("eivkb")) - .withObjectApiName("datahoxfzz") - .withReportId("datamupbus"); - model = BinaryData.fromObject(model).toObject(SalesforceV2ObjectDataset.class); - Assertions.assertEquals("ugozwplxzgzumnot", model.description()); - Assertions.assertEquals("izonzsur", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("ssjgbfbb").type()); - Assertions.assertEquals("eivkb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java deleted file mode 100644 index 5ace317240d8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2ObjectDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SalesforceV2ObjectDatasetTypeProperties; - -public final class SalesforceV2ObjectDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceV2ObjectDatasetTypeProperties model - = BinaryData.fromString("{\"objectApiName\":\"datagbzdto\",\"reportId\":\"dataiwnyfzdpxctsu\"}") - .toObject(SalesforceV2ObjectDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceV2ObjectDatasetTypeProperties model - = new SalesforceV2ObjectDatasetTypeProperties().withObjectApiName("datagbzdto") - .withReportId("dataiwnyfzdpxctsu"); - model = BinaryData.fromObject(model).toObject(SalesforceV2ObjectDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java deleted file mode 100644 index 3f2762f75551..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SinkTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceV2Sink; -import com.azure.resourcemanager.datafactory.models.SalesforceV2SinkWriteBehavior; -import org.junit.jupiter.api.Assertions; - -public final class SalesforceV2SinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceV2Sink model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Sink\",\"writeBehavior\":\"Upsert\",\"externalIdFieldName\":\"datakpumzda\",\"ignoreNullValues\":\"dataoqeteavphup\",\"writeBatchSize\":\"datarp\",\"writeBatchTimeout\":\"datavgihknnvjgccq\",\"sinkRetryCount\":\"dataewofhjonqkbnr\",\"sinkRetryWait\":\"dataattzxvfsrufj\",\"maxConcurrentConnections\":\"datavryfb\",\"disableMetricsCollection\":\"datavzgy\",\"\":{\"dngtylvdumpmx\":\"dataveiy\",\"kmrvgdjbl\":\"datafkbbchdypc\"}}") - .toObject(SalesforceV2Sink.class); - Assertions.assertEquals(SalesforceV2SinkWriteBehavior.UPSERT, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceV2Sink model = new SalesforceV2Sink().withWriteBatchSize("datarp") - .withWriteBatchTimeout("datavgihknnvjgccq") - .withSinkRetryCount("dataewofhjonqkbnr") - .withSinkRetryWait("dataattzxvfsrufj") - .withMaxConcurrentConnections("datavryfb") - .withDisableMetricsCollection("datavzgy") - .withWriteBehavior(SalesforceV2SinkWriteBehavior.UPSERT) - .withExternalIdFieldName("datakpumzda") - .withIgnoreNullValues("dataoqeteavphup"); - model = BinaryData.fromObject(model).toObject(SalesforceV2Sink.class); - Assertions.assertEquals(SalesforceV2SinkWriteBehavior.UPSERT, model.writeBehavior()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java deleted file mode 100644 index 99afea7cca68..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SalesforceV2SourceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SalesforceV2Source; - -public final class SalesforceV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SalesforceV2Source model = BinaryData.fromString( - "{\"type\":\"SalesforceV2Source\",\"SOQLQuery\":\"datanf\",\"query\":\"datatku\",\"includeDeletedObjects\":\"datadeqrpuamgijevf\",\"queryTimeout\":\"datavuokwjmt\",\"additionalColumns\":\"datapfni\",\"sourceRetryCount\":\"datak\",\"sourceRetryWait\":\"dataejtdlqorcypskwfa\",\"maxConcurrentConnections\":\"dataz\",\"disableMetricsCollection\":\"datakruclzm\",\"\":{\"hayqxlcrshozuje\":\"dataqgihducv\",\"vtzrg\":\"dataobfviscauudxf\"}}") - .toObject(SalesforceV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SalesforceV2Source model = new SalesforceV2Source().withSourceRetryCount("datak") - .withSourceRetryWait("dataejtdlqorcypskwfa") - .withMaxConcurrentConnections("dataz") - .withDisableMetricsCollection("datakruclzm") - .withQueryTimeout("datavuokwjmt") - .withAdditionalColumns("datapfni") - .withSoqlQuery("datanf") - .withQuery("datatku") - .withIncludeDeletedObjects("datadeqrpuamgijevf"); - model = BinaryData.fromObject(model).toObject(SalesforceV2Source.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java deleted file mode 100644 index 9cf775da5145..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwCubeDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapBwCubeDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapBwCubeDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapBwCubeDataset model = BinaryData.fromString( - 
"{\"type\":\"SapBwCube\",\"description\":\"kwakkchsf\",\"structure\":\"datalb\",\"schema\":\"datacxu\",\"linkedServiceName\":{\"referenceName\":\"bsdqbdybl\",\"parameters\":{\"m\":\"datat\",\"lerufollcshju\":\"datapgweoqhbjqlqf\",\"xvjeazrah\":\"dataihbymjjvtpne\",\"qamcthtpqgfz\":\"datalhbimyii\"}},\"parameters\":{\"vflgzhc\":{\"type\":\"Bool\",\"defaultValue\":\"datakdi\"},\"pccxziv\":{\"type\":\"Object\",\"defaultValue\":\"datawahcrxofgrutv\"},\"jd\":{\"type\":\"SecureString\",\"defaultValue\":\"datahzghhhkvn\"}},\"annotations\":[\"dataq\"],\"folder\":{\"name\":\"njvpmxn\"},\"\":{\"olrwvtlgxyfj\":\"datazstqlf\"}}") - .toObject(SapBwCubeDataset.class); - Assertions.assertEquals("kwakkchsf", model.description()); - Assertions.assertEquals("bsdqbdybl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vflgzhc").type()); - Assertions.assertEquals("njvpmxn", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapBwCubeDataset model = new SapBwCubeDataset().withDescription("kwakkchsf") - .withStructure("datalb") - .withSchema("datacxu") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("bsdqbdybl") - .withParameters(mapOf("m", "datat", "lerufollcshju", "datapgweoqhbjqlqf", "xvjeazrah", - "dataihbymjjvtpne", "qamcthtpqgfz", "datalhbimyii"))) - .withParameters(mapOf("vflgzhc", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datakdi"), "pccxziv", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datawahcrxofgrutv"), "jd", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahzghhhkvn"))) - .withAnnotations(Arrays.asList("dataq")) - .withFolder(new DatasetFolder().withName("njvpmxn")); - model = BinaryData.fromObject(model).toObject(SapBwCubeDataset.class); - Assertions.assertEquals("kwakkchsf", model.description()); - 
Assertions.assertEquals("bsdqbdybl", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("vflgzhc").type()); - Assertions.assertEquals("njvpmxn", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java deleted file mode 100644 index 84a6a995e60e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapBwSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapBwSource; - -public final class SapBwSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapBwSource model = BinaryData.fromString( - "{\"type\":\"SapBwSource\",\"query\":\"datadsgrt\",\"queryTimeout\":\"datavimrupg\",\"additionalColumns\":\"datajmandrvv\",\"sourceRetryCount\":\"dataklbfvtzdtw\",\"sourceRetryWait\":\"datajlpkocrexfmqfuf\",\"maxConcurrentConnections\":\"datapuvwlfzjrjglacpz\",\"disableMetricsCollection\":\"datascgslwujkeytpm\",\"\":{\"ujphqvfx\":\"datannbmodsytqtva\",\"oxwpiqkkmpfnwdr\":\"datavogwgh\"}}") - .toObject(SapBwSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapBwSource model = new SapBwSource().withSourceRetryCount("dataklbfvtzdtw") - .withSourceRetryWait("datajlpkocrexfmqfuf") - .withMaxConcurrentConnections("datapuvwlfzjrjglacpz") - .withDisableMetricsCollection("datascgslwujkeytpm") - .withQueryTimeout("datavimrupg") - .withAdditionalColumns("datajmandrvv") - .withQuery("datadsgrt"); - model = BinaryData.fromObject(model).toObject(SapBwSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java deleted file mode 100644 index 9eef300abc56..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapCloudForCustomerResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapCloudForCustomerResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapCloudForCustomerResourceDataset model = BinaryData.fromString( - "{\"type\":\"SapCloudForCustomerResource\",\"typeProperties\":{\"path\":\"datawequfl\"},\"description\":\"yopoa\",\"structure\":\"datawwgw\",\"schema\":\"datab\",\"linkedServiceName\":{\"referenceName\":\"tbvufrkwjiemim\",\"parameters\":{\"reeedddrftfquul\":\"datapowewjs\"}},\"parameters\":{\"b\":{\"type\":\"Bool\",\"defaultValue\":\"dataiigeeu\"}},\"annotations\":[\"datayxfedqnetd\",\"datawynxoqgvbz\"],\"folder\":{\"name\":\"pw\"},\"\":{\"a\":\"dataobqajejir\"}}") - .toObject(SapCloudForCustomerResourceDataset.class); - Assertions.assertEquals("yopoa", model.description()); - Assertions.assertEquals("tbvufrkwjiemim", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("b").type()); - Assertions.assertEquals("pw", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapCloudForCustomerResourceDataset model = new SapCloudForCustomerResourceDataset().withDescription("yopoa") - .withStructure("datawwgw") - .withSchema("datab") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("tbvufrkwjiemim") - 
.withParameters(mapOf("reeedddrftfquul", "datapowewjs"))) - .withParameters( - mapOf("b", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataiigeeu"))) - .withAnnotations(Arrays.asList("datayxfedqnetd", "datawynxoqgvbz")) - .withFolder(new DatasetFolder().withName("pw")) - .withPath("datawequfl"); - model = BinaryData.fromObject(model).toObject(SapCloudForCustomerResourceDataset.class); - Assertions.assertEquals("yopoa", model.description()); - Assertions.assertEquals("tbvufrkwjiemim", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("b").type()); - Assertions.assertEquals("pw", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java deleted file mode 100644 index 4eec472d4773..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapCloudForCustomerResourceDatasetTypeProperties; - -public final class SapCloudForCustomerResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapCloudForCustomerResourceDatasetTypeProperties model - = BinaryData.fromString("{\"path\":\"datarvkgpogplbjuvl\"}") - .toObject(SapCloudForCustomerResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapCloudForCustomerResourceDatasetTypeProperties model - = new SapCloudForCustomerResourceDatasetTypeProperties().withPath("datarvkgpogplbjuvl"); - model = BinaryData.fromObject(model).toObject(SapCloudForCustomerResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java deleted file mode 100644 index b74d2ff49809..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSinkTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSink; -import com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSinkWriteBehavior; -import org.junit.jupiter.api.Assertions; - -public final class SapCloudForCustomerSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapCloudForCustomerSink model = BinaryData.fromString( - "{\"type\":\"SapCloudForCustomerSink\",\"writeBehavior\":\"Insert\",\"httpRequestTimeout\":\"dataty\",\"writeBatchSize\":\"dataacgukierdq\",\"writeBatchTimeout\":\"dataassiiilcmrgahsc\",\"sinkRetryCount\":\"datayxgcgbvieqonsbu\",\"sinkRetryWait\":\"datanxdivqopxunooxtk\",\"maxConcurrentConnections\":\"datanac\",\"disableMetricsCollection\":\"datazcytbhdjpagwszmw\",\"\":{\"vqg\":\"datafeyexbgdfyoszwi\",\"nvdabaodiytxq\":\"dataeacqjgedxpbpjwz\"}}") - .toObject(SapCloudForCustomerSink.class); - Assertions.assertEquals(SapCloudForCustomerSinkWriteBehavior.INSERT, model.writeBehavior()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapCloudForCustomerSink model = new SapCloudForCustomerSink().withWriteBatchSize("dataacgukierdq") - .withWriteBatchTimeout("dataassiiilcmrgahsc") - .withSinkRetryCount("datayxgcgbvieqonsbu") - .withSinkRetryWait("datanxdivqopxunooxtk") - .withMaxConcurrentConnections("datanac") - .withDisableMetricsCollection("datazcytbhdjpagwszmw") - .withWriteBehavior(SapCloudForCustomerSinkWriteBehavior.INSERT) - .withHttpRequestTimeout("dataty"); - model = BinaryData.fromObject(model).toObject(SapCloudForCustomerSink.class); - Assertions.assertEquals(SapCloudForCustomerSinkWriteBehavior.INSERT, model.writeBehavior()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java deleted file mode 100644 index 714f3f4f7787..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapCloudForCustomerSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapCloudForCustomerSource; - -public final class SapCloudForCustomerSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapCloudForCustomerSource model = BinaryData.fromString( - "{\"type\":\"SapCloudForCustomerSource\",\"query\":\"datafvnvscyutqwsynt\",\"httpRequestTimeout\":\"datarluqaqn\",\"queryTimeout\":\"datag\",\"additionalColumns\":\"datab\",\"sourceRetryCount\":\"datajbozkl\",\"sourceRetryWait\":\"dataifvpsmvk\",\"maxConcurrentConnections\":\"datauw\",\"disableMetricsCollection\":\"datanplqf\",\"\":{\"ov\":\"datafqmdjz\",\"rjkmpaxoe\":\"datakp\",\"mqzagrqcqhwfs\":\"datalpofaog\"}}") - .toObject(SapCloudForCustomerSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapCloudForCustomerSource model = new SapCloudForCustomerSource().withSourceRetryCount("datajbozkl") - .withSourceRetryWait("dataifvpsmvk") - .withMaxConcurrentConnections("datauw") - .withDisableMetricsCollection("datanplqf") - .withQueryTimeout("datag") - .withAdditionalColumns("datab") - .withQuery("datafvnvscyutqwsynt") - .withHttpRequestTimeout("datarluqaqn"); - model = BinaryData.fromObject(model).toObject(SapCloudForCustomerSource.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java deleted file mode 100644 index e4d077316941..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapEccResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapEccResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapEccResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"SapEccResource\",\"typeProperties\":{\"path\":\"datahxnrn\"},\"description\":\"inaegesbx\",\"structure\":\"datapqm\",\"schema\":\"dataoplukfy\",\"linkedServiceName\":{\"referenceName\":\"kfhsovadkr\",\"parameters\":{\"vporiwbwggi\":\"datamwqpdkesjqbzkqm\",\"ikwsbzrhdugq\":\"datatspzjnrr\",\"ru\":\"datahtrgz\",\"nczk\":\"datajfhrjhiycbause\"}},\"parameters\":{\"jsqwzszoszjgz\":{\"type\":\"Array\",\"defaultValue\":\"datauw\"},\"qhcza\":{\"type\":\"Array\",\"defaultValue\":\"datafnyskwwu\"},\"lhipcukvbljpxpr\":{\"type\":\"Int\",\"defaultValue\":\"datam\"},\"f\":{\"type\":\"Object\",\"defaultValue\":\"datahyluqalpcufj\"}},\"annotations\":[\"dataztqdstahhh\"],\"folder\":{\"name\":\"xxsri\"},\"\":{\"yefmxwoqotii\":\"datawbaaes\"}}") - .toObject(SapEccResourceDataset.class); - Assertions.assertEquals("inaegesbx", model.description()); - Assertions.assertEquals("kfhsovadkr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jsqwzszoszjgz").type()); - Assertions.assertEquals("xxsri", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapEccResourceDataset model = new SapEccResourceDataset().withDescription("inaegesbx") - .withStructure("datapqm") - .withSchema("dataoplukfy") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kfhsovadkr") - .withParameters(mapOf("vporiwbwggi", "datamwqpdkesjqbzkqm", "ikwsbzrhdugq", "datatspzjnrr", "ru", - "datahtrgz", "nczk", "datajfhrjhiycbause"))) - .withParameters(mapOf("jsqwzszoszjgz", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datauw"), "qhcza", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datafnyskwwu"), - "lhipcukvbljpxpr", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datam"), - "f", new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datahyluqalpcufj"))) - .withAnnotations(Arrays.asList("dataztqdstahhh")) - .withFolder(new DatasetFolder().withName("xxsri")) - .withPath("datahxnrn"); - model = BinaryData.fromObject(model).toObject(SapEccResourceDataset.class); - Assertions.assertEquals("inaegesbx", model.description()); - Assertions.assertEquals("kfhsovadkr", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("jsqwzszoszjgz").type()); - Assertions.assertEquals("xxsri", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java deleted file mode 100644 index 42b0c1475921..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapEccResourceDatasetTypeProperties; - -public final class SapEccResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapEccResourceDatasetTypeProperties model = BinaryData.fromString("{\"path\":\"databgpasrvrmti\"}") - .toObject(SapEccResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapEccResourceDatasetTypeProperties model - = new SapEccResourceDatasetTypeProperties().withPath("databgpasrvrmti"); - model = BinaryData.fromObject(model).toObject(SapEccResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java deleted file mode 100644 index 7ffb393ad1ea..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapEccSourceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapEccSource; - -public final class SapEccSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapEccSource model = BinaryData.fromString( - "{\"type\":\"SapEccSource\",\"query\":\"datakd\",\"httpRequestTimeout\":\"datakdpn\",\"queryTimeout\":\"datadwcxjv\",\"additionalColumns\":\"datal\",\"sourceRetryCount\":\"dataxcmcccotqocnryyp\",\"sourceRetryWait\":\"dataduldsolbz\",\"maxConcurrentConnections\":\"datarufkeylkpvaagrd\",\"disableMetricsCollection\":\"datavglqdsphvo\",\"\":{\"thzfotfrfhrjka\":\"dataryhuo\",\"nitrmzvnrfkzn\":\"datadofshgmqxwop\",\"rnxrjmilogcn\":\"dataaittbmobrxhwpg\",\"plrtxhzt\":\"datafgqibb\"}}") - .toObject(SapEccSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapEccSource model = new SapEccSource().withSourceRetryCount("dataxcmcccotqocnryyp") - .withSourceRetryWait("dataduldsolbz") - .withMaxConcurrentConnections("datarufkeylkpvaagrd") - .withDisableMetricsCollection("datavglqdsphvo") - .withQueryTimeout("datadwcxjv") - .withAdditionalColumns("datal") - .withQuery("datakd") - .withHttpRequestTimeout("datakdpn"); - model = BinaryData.fromObject(model).toObject(SapEccSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java deleted file mode 100644 index 8f30f49100bb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaPartitionSettingsTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapHanaPartitionSettings; - -public final class SapHanaPartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapHanaPartitionSettings model - = BinaryData.fromString("{\"partitionColumnName\":\"datac\"}").toObject(SapHanaPartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapHanaPartitionSettings model = new SapHanaPartitionSettings().withPartitionColumnName("datac"); - model = BinaryData.fromObject(model).toObject(SapHanaPartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java deleted file mode 100644 index 60acac1771fa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaSourceTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapHanaPartitionSettings; -import com.azure.resourcemanager.datafactory.models.SapHanaSource; - -public final class SapHanaSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapHanaSource model = BinaryData.fromString( - "{\"type\":\"SapHanaSource\",\"query\":\"datawyrsfj\",\"packetSize\":\"dataoyusrbuydeyh\",\"partitionOption\":\"datattkdrblehenj\",\"partitionSettings\":{\"partitionColumnName\":\"datawdeosbijikjfji\"},\"queryTimeout\":\"datawhbpojujpifxtg\",\"additionalColumns\":\"dataavfjx\",\"sourceRetryCount\":\"datawx\",\"sourceRetryWait\":\"dataauh\",\"maxConcurrentConnections\":\"datachphovu\",\"disableMetricsCollection\":\"datasczwcxlncoh\",\"\":{\"kbdozsspfwmf\":\"datavyriawfwwsg\",\"xsthj\":\"dataort\",\"ilhosot\":\"datayirybuqmkmwdoknv\",\"r\":\"dataioxgsrhxoyrg\"}}") - .toObject(SapHanaSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapHanaSource model = new SapHanaSource().withSourceRetryCount("datawx") - .withSourceRetryWait("dataauh") - .withMaxConcurrentConnections("datachphovu") - .withDisableMetricsCollection("datasczwcxlncoh") - .withQueryTimeout("datawhbpojujpifxtg") - .withAdditionalColumns("dataavfjx") - .withQuery("datawyrsfj") - .withPacketSize("dataoyusrbuydeyh") - .withPartitionOption("datattkdrblehenj") - .withPartitionSettings(new SapHanaPartitionSettings().withPartitionColumnName("datawdeosbijikjfji")); - model = BinaryData.fromObject(model).toObject(SapHanaSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java deleted file 
mode 100644 index c738de6442fc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapHanaTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapHanaTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapHanaTableDataset model = BinaryData.fromString( - "{\"type\":\"SapHanaTable\",\"typeProperties\":{\"schema\":\"dataikjhorlxkpypkenn\",\"table\":\"datantrqxxwtdmb\"},\"description\":\"tsuhqhtoxtd\",\"structure\":\"dataavfxbqmzxsya\",\"schema\":\"datainpaamihwbgh\",\"linkedServiceName\":{\"referenceName\":\"wt\",\"parameters\":{\"gsf\":\"datagch\",\"sqbjsdj\":\"datahbjki\",\"sgw\":\"datagxe\",\"ivoveomkhfeqcoop\":\"datacfferznzc\"}},\"parameters\":{\"xsuloutnpb\":{\"type\":\"String\",\"defaultValue\":\"dataimgckycjpeebznbz\"},\"cdmwk\":{\"type\":\"Int\",\"defaultValue\":\"dataoqohgp\"}},\"annotations\":[\"datafhsl\",\"dataqd\",\"dataa\"],\"folder\":{\"name\":\"nyevdyzdsytciks\"},\"\":{\"mvhadrpbatvy\":\"dataamwuynfxkcgsf\",\"qwsyjtvjkowggxaw\":\"datakljq\",\"ov\":\"datadmbypnkteiidl\",\"cnerekyjul\":\"databclprgeganihk\"}}") - .toObject(SapHanaTableDataset.class); - 
Assertions.assertEquals("tsuhqhtoxtd", model.description()); - Assertions.assertEquals("wt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("xsuloutnpb").type()); - Assertions.assertEquals("nyevdyzdsytciks", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapHanaTableDataset model = new SapHanaTableDataset().withDescription("tsuhqhtoxtd") - .withStructure("dataavfxbqmzxsya") - .withSchema("datainpaamihwbgh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("wt") - .withParameters(mapOf("gsf", "datagch", "sqbjsdj", "datahbjki", "sgw", "datagxe", "ivoveomkhfeqcoop", - "datacfferznzc"))) - .withParameters(mapOf("xsuloutnpb", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataimgckycjpeebznbz"), - "cdmwk", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataoqohgp"))) - .withAnnotations(Arrays.asList("datafhsl", "dataqd", "dataa")) - .withFolder(new DatasetFolder().withName("nyevdyzdsytciks")) - .withSchemaTypePropertiesSchema("dataikjhorlxkpypkenn") - .withTable("datantrqxxwtdmb"); - model = BinaryData.fromObject(model).toObject(SapHanaTableDataset.class); - Assertions.assertEquals("tsuhqhtoxtd", model.description()); - Assertions.assertEquals("wt", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("xsuloutnpb").type()); - Assertions.assertEquals("nyevdyzdsytciks", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java deleted file mode 100644 index 8899f56a4450..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapHanaTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapHanaTableDatasetTypeProperties; - -public final class SapHanaTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapHanaTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datawwnqhqlqgpw\",\"table\":\"datavcebavvbvxwv\"}") - .toObject(SapHanaTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapHanaTableDatasetTypeProperties model - = new SapHanaTableDatasetTypeProperties().withSchema("datawwnqhqlqgpw").withTable("datavcebavvbvxwv"); - model = BinaryData.fromObject(model).toObject(SapHanaTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java deleted file mode 100644 index 98424c081482..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapOdpResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapOdpResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOdpResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"SapOdpResource\",\"typeProperties\":{\"context\":\"datapuz\",\"objectName\":\"datadegefxlieggoto\"},\"description\":\"hssfnwh\",\"structure\":\"dataah\",\"schema\":\"datackn\",\"linkedServiceName\":{\"referenceName\":\"mkcu\",\"parameters\":{\"dvhzfkdn\":\"datadgwuzron\",\"k\":\"dataysodcikgx\",\"w\":\"datazfzdjekeb\"}},\"parameters\":{\"gcjf\":{\"type\":\"Bool\",\"defaultValue\":\"datawyfixi\"},\"kakrxifqnffo\":{\"type\":\"SecureString\",\"defaultValue\":\"dataulp\"},\"xcmmhipbvskcitly\":{\"type\":\"Object\",\"defaultValue\":\"dataqtzngxbsalewg\"},\"d\":{\"type\":\"String\",\"defaultValue\":\"datafsaangfgbmcvmh\"}},\"annotations\":[\"datauaj\",\"datalnac\",\"datadnxqeonm\"],\"folder\":{\"name\":\"jaojpzngdrzige\"},\"\":{\"ohheuy\":\"dataadsqyuddkhwqd\",\"gdeipnfizejwl\":\"dataunxmyevy\",\"jodapqok\":\"dataiacndjzwh\",\"pfapmqnmelyk\":\"datadyncradxsewbe\"}}") - .toObject(SapOdpResourceDataset.class); - Assertions.assertEquals("hssfnwh", model.description()); - Assertions.assertEquals("mkcu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gcjf").type()); - Assertions.assertEquals("jaojpzngdrzige", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOdpResourceDataset model = new SapOdpResourceDataset().withDescription("hssfnwh") - .withStructure("dataah") - .withSchema("datackn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("mkcu") - .withParameters(mapOf("dvhzfkdn", "datadgwuzron", "k", "dataysodcikgx", "w", "datazfzdjekeb"))) - .withParameters(mapOf("gcjf", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datawyfixi"), - "kakrxifqnffo", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataulp"), - "xcmmhipbvskcitly", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("dataqtzngxbsalewg"), "d", - new 
ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datafsaangfgbmcvmh"))) - .withAnnotations(Arrays.asList("datauaj", "datalnac", "datadnxqeonm")) - .withFolder(new DatasetFolder().withName("jaojpzngdrzige")) - .withContext("datapuz") - .withObjectName("datadegefxlieggoto"); - model = BinaryData.fromObject(model).toObject(SapOdpResourceDataset.class); - Assertions.assertEquals("hssfnwh", model.description()); - Assertions.assertEquals("mkcu", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("gcjf").type()); - Assertions.assertEquals("jaojpzngdrzige", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java deleted file mode 100644 index d97ab9c4f478..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapOdpResourceDatasetTypeProperties; - -public final class SapOdpResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOdpResourceDatasetTypeProperties model - = BinaryData.fromString("{\"context\":\"dataygihiclmslnu\",\"objectName\":\"datakqvzlbbbajdexq\"}") - .toObject(SapOdpResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOdpResourceDatasetTypeProperties model - = new SapOdpResourceDatasetTypeProperties().withContext("dataygihiclmslnu") - .withObjectName("datakqvzlbbbajdexq"); - model = BinaryData.fromObject(model).toObject(SapOdpResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java deleted file mode 100644 index 663424977406..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOdpSourceTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapOdpSource; - -public final class SapOdpSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOdpSource model = BinaryData.fromString( - "{\"type\":\"SapOdpSource\",\"extractionMode\":\"databuk\",\"subscriberProcess\":\"datajcwdoecdqu\",\"selection\":\"datauqco\",\"projection\":\"datahdxjrrbyrbn\",\"queryTimeout\":\"datapsquouppzgdtu\",\"additionalColumns\":\"dataoimojcm\",\"sourceRetryCount\":\"datacd\",\"sourceRetryWait\":\"datavorzhzfoc\",\"maxConcurrentConnections\":\"datayltornv\",\"disableMetricsCollection\":\"datauy\",\"\":{\"bv\":\"dataifbdwyvvcyw\",\"okeqeowbp\":\"datathrexzvejqzyuik\",\"tgwerbpobvj\":\"dataiehvgchsg\",\"vvmdtkllqhznutrx\":\"dataunicgrxce\"}}") - .toObject(SapOdpSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOdpSource model = new SapOdpSource().withSourceRetryCount("datacd") - .withSourceRetryWait("datavorzhzfoc") - .withMaxConcurrentConnections("datayltornv") - .withDisableMetricsCollection("datauy") - .withQueryTimeout("datapsquouppzgdtu") - .withAdditionalColumns("dataoimojcm") - .withExtractionMode("databuk") - .withSubscriberProcess("datajcwdoecdqu") - .withSelection("datauqco") - .withProjection("datahdxjrrbyrbn"); - model = BinaryData.fromObject(model).toObject(SapOdpSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java deleted file mode 100644 index 84686b11e6b1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubSourceTests.java 
+++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapOpenHubSource; - -public final class SapOpenHubSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOpenHubSource model = BinaryData.fromString( - "{\"type\":\"SapOpenHubSource\",\"excludeLastRequest\":\"datazglbplqh\",\"baseRequestId\":\"dataaruk\",\"customRfcReadTableFunctionModule\":\"datapsxufyqcqfou\",\"sapDataColumnDelimiter\":\"dataeyxgxbgo\",\"queryTimeout\":\"datapxpswp\",\"additionalColumns\":\"datasioo\",\"sourceRetryCount\":\"dataugbdkxlwck\",\"sourceRetryWait\":\"datalzk\",\"maxConcurrentConnections\":\"datajtapvqjebtdp\",\"disableMetricsCollection\":\"datakeexso\",\"\":{\"twtfqpmpyw\":\"datavylvt\"}}") - .toObject(SapOpenHubSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOpenHubSource model = new SapOpenHubSource().withSourceRetryCount("dataugbdkxlwck") - .withSourceRetryWait("datalzk") - .withMaxConcurrentConnections("datajtapvqjebtdp") - .withDisableMetricsCollection("datakeexso") - .withQueryTimeout("datapxpswp") - .withAdditionalColumns("datasioo") - .withExcludeLastRequest("datazglbplqh") - .withBaseRequestId("dataaruk") - .withCustomRfcReadTableFunctionModule("datapsxufyqcqfou") - .withSapDataColumnDelimiter("dataeyxgxbgo"); - model = BinaryData.fromObject(model).toObject(SapOpenHubSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java 
deleted file mode 100644 index d34b61e16174..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapOpenHubTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapOpenHubTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOpenHubTableDataset model = BinaryData.fromString( - "{\"type\":\"SapOpenHubTable\",\"typeProperties\":{\"openHubDestinationName\":\"datagenlrjcsmwevguyf\",\"excludeLastRequest\":\"dataxelrjk\",\"baseRequestId\":\"datacdetowwezhy\"},\"description\":\"di\",\"structure\":\"datawqlqacs\",\"schema\":\"databirtybcelfjn\",\"linkedServiceName\":{\"referenceName\":\"odnjyhzfaxskdv\",\"parameters\":{\"pegqxsorch\":\"dataumo\",\"kbmpw\":\"datazrqoxzyhlbeqvh\",\"zgaefhawkmib\":\"dataslajgg\",\"wi\":\"datay\"}},\"parameters\":{\"u\":{\"type\":\"Int\",\"defaultValue\":\"dataupdyttqm\"},\"s\":{\"type\":\"Array\",\"defaultValue\":\"datal\"}},\"annotations\":[\"datahhtuqmtxynof\",\"dataqobfixngxebihe\"],\"folder\":{\"name\":\"kingiqcdolrpgu\"},\"\":{\"dafbncuy\":\"datalbsm\",\"fzxjzi\":\"dataeykcnhpplzh\",\"wnuwkkfzzetl\":\"dataucrln\",\"vwywjvrlgqpwwlzp\":\"datahdyxz\"}}") - 
.toObject(SapOpenHubTableDataset.class); - Assertions.assertEquals("di", model.description()); - Assertions.assertEquals("odnjyhzfaxskdv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("u").type()); - Assertions.assertEquals("kingiqcdolrpgu", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOpenHubTableDataset model = new SapOpenHubTableDataset().withDescription("di") - .withStructure("datawqlqacs") - .withSchema("databirtybcelfjn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("odnjyhzfaxskdv") - .withParameters(mapOf("pegqxsorch", "dataumo", "kbmpw", "datazrqoxzyhlbeqvh", "zgaefhawkmib", - "dataslajgg", "wi", "datay"))) - .withParameters( - mapOf("u", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataupdyttqm"), - "s", new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datal"))) - .withAnnotations(Arrays.asList("datahhtuqmtxynof", "dataqobfixngxebihe")) - .withFolder(new DatasetFolder().withName("kingiqcdolrpgu")) - .withOpenHubDestinationName("datagenlrjcsmwevguyf") - .withExcludeLastRequest("dataxelrjk") - .withBaseRequestId("datacdetowwezhy"); - model = BinaryData.fromObject(model).toObject(SapOpenHubTableDataset.class); - Assertions.assertEquals("di", model.description()); - Assertions.assertEquals("odnjyhzfaxskdv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("u").type()); - Assertions.assertEquals("kingiqcdolrpgu", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java deleted file mode 100644 index f04b230dc674..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapOpenHubTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapOpenHubTableDatasetTypeProperties; - -public final class SapOpenHubTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapOpenHubTableDatasetTypeProperties model = BinaryData.fromString( - "{\"openHubDestinationName\":\"datadarcb\",\"excludeLastRequest\":\"datawhslxebaja\",\"baseRequestId\":\"datan\"}") - .toObject(SapOpenHubTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapOpenHubTableDatasetTypeProperties model - = new SapOpenHubTableDatasetTypeProperties().withOpenHubDestinationName("datadarcb") - .withExcludeLastRequest("datawhslxebaja") - .withBaseRequestId("datan"); - model = BinaryData.fromObject(model).toObject(SapOpenHubTableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java deleted file mode 100644 index dc919bc7fa34..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTablePartitionSettingsTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapTablePartitionSettings; - -public final class SapTablePartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapTablePartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"dataigmqntutetdtgci\",\"partitionUpperBound\":\"datarjwiwou\",\"partitionLowerBound\":\"dataaqnfyhgrcm\",\"maxPartitionsNumber\":\"datappledxyect\"}") - .toObject(SapTablePartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapTablePartitionSettings model = new SapTablePartitionSettings().withPartitionColumnName("dataigmqntutetdtgci") - .withPartitionUpperBound("datarjwiwou") - .withPartitionLowerBound("dataaqnfyhgrcm") - .withMaxPartitionsNumber("datappledxyect"); - model = BinaryData.fromObject(model).toObject(SapTablePartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java deleted file mode 100644 index 611a3ed3e015..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SapTableResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SapTableResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapTableResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"SapTableResource\",\"typeProperties\":{\"tableName\":\"datamlfjymgw\"},\"description\":\"szcfyzqpeqreg\",\"structure\":\"datardpagknxmaovr\",\"schema\":\"datahlnzffew\",\"linkedServiceName\":{\"referenceName\":\"qkycj\",\"parameters\":{\"abhgclejqzhpvh\":\"dataipqxxsdyafwtydsm\",\"gf\":\"datapbadjzeul\",\"vqerqxkomwdzpzl\":\"dataogtqscj\",\"qpwwvmbjecfwlbgh\":\"datacuex\"}},\"parameters\":{\"zpchiy\":{\"type\":\"String\",\"defaultValue\":\"dataxohlydsn\"},\"ozewbr\":{\"type\":\"String\",\"defaultValue\":\"datahmihikt\"},\"uxboufqnnqbjxgj\":{\"type\":\"String\",\"defaultValue\":\"datazgkbr\"}},\"annotations\":[\"dataerukbuu\",\"datari\",\"datawkwkjxlaacedikqe\",\"datassybzbe\"],\"folder\":{\"name\":\"nrommki\"},\"\":{\"yut\":\"datapwtmzyj\",\"i\":\"datamzqlnaag\",\"fqiywhxpsb\":\"dataj\"}}") - .toObject(SapTableResourceDataset.class); - Assertions.assertEquals("szcfyzqpeqreg", model.description()); - Assertions.assertEquals("qkycj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("zpchiy").type()); - Assertions.assertEquals("nrommki", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapTableResourceDataset model = new SapTableResourceDataset().withDescription("szcfyzqpeqreg") - .withStructure("datardpagknxmaovr") - .withSchema("datahlnzffew") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("qkycj") - .withParameters(mapOf("abhgclejqzhpvh", "dataipqxxsdyafwtydsm", "gf", "datapbadjzeul", - "vqerqxkomwdzpzl", "dataogtqscj", "qpwwvmbjecfwlbgh", "datacuex"))) - .withParameters(mapOf("zpchiy", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataxohlydsn"), "ozewbr", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datahmihikt"), - "uxboufqnnqbjxgj", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datazgkbr"))) - 
.withAnnotations(Arrays.asList("dataerukbuu", "datari", "datawkwkjxlaacedikqe", "datassybzbe")) - .withFolder(new DatasetFolder().withName("nrommki")) - .withTableName("datamlfjymgw"); - model = BinaryData.fromObject(model).toObject(SapTableResourceDataset.class); - Assertions.assertEquals("szcfyzqpeqreg", model.description()); - Assertions.assertEquals("qkycj", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("zpchiy").type()); - Assertions.assertEquals("nrommki", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java deleted file mode 100644 index 8a563758a51c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableResourceDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SapTableResourceDatasetTypeProperties; - -public final class SapTableResourceDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapTableResourceDatasetTypeProperties model = BinaryData.fromString("{\"tableName\":\"datapialezay\"}") - .toObject(SapTableResourceDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapTableResourceDatasetTypeProperties model - = new SapTableResourceDatasetTypeProperties().withTableName("datapialezay"); - model = BinaryData.fromObject(model).toObject(SapTableResourceDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java deleted file mode 100644 index 817e47a18c07..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SapTableSourceTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SapTablePartitionSettings; -import com.azure.resourcemanager.datafactory.models.SapTableSource; - -public final class SapTableSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SapTableSource model = BinaryData.fromString( - "{\"type\":\"SapTableSource\",\"rowCount\":\"dataz\",\"rowSkips\":\"datanniarjezj\",\"rfcTableFields\":\"dataxiqfoqwesqykqfs\",\"rfcTableOptions\":\"datalsaipshhet\",\"batchSize\":\"datawmzgvnojgmobkali\",\"customRfcReadTableFunctionModule\":\"dataikkehpdssvlubd\",\"sapDataColumnDelimiter\":\"dataowxsxbxd\",\"partitionOption\":\"dataixurcekc\",\"partitionSettings\":{\"partitionColumnName\":\"dataqqa\",\"partitionUpperBound\":\"datatghmtb\",\"partitionLowerBound\":\"datakcnkghkr\",\"maxPartitionsNumber\":\"datarshlheayod\"},\"queryTimeout\":\"datawnqbpxy\",\"additionalColumns\":\"dataftxzovbhqels\",\"sourceRetryCount\":\"datafxejpocsgigsab\",\"sourceRetryWait\":\"datandyjwmglgstrzfhe\",\"maxConcurrentConnections\":\"datazovkbcbef\",\"disableMetricsCollection\":\"datanymfhmljimkg\",\"\":{\"eet\":\"datavmtjcxigiszxdbgl\",\"izw\":\"dataivmbu\",\"juls\":\"datawfhfptbdxtvl\",\"zytxe\":\"datafi\"}}") - .toObject(SapTableSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SapTableSource model = new SapTableSource().withSourceRetryCount("datafxejpocsgigsab") - .withSourceRetryWait("datandyjwmglgstrzfhe") - .withMaxConcurrentConnections("datazovkbcbef") - .withDisableMetricsCollection("datanymfhmljimkg") - .withQueryTimeout("datawnqbpxy") - .withAdditionalColumns("dataftxzovbhqels") - .withRowCount("dataz") - .withRowSkips("datanniarjezj") - .withRfcTableFields("dataxiqfoqwesqykqfs") - .withRfcTableOptions("datalsaipshhet") - .withBatchSize("datawmzgvnojgmobkali") - 
.withCustomRfcReadTableFunctionModule("dataikkehpdssvlubd") - .withSapDataColumnDelimiter("dataowxsxbxd") - .withPartitionOption("dataixurcekc") - .withPartitionSettings(new SapTablePartitionSettings().withPartitionColumnName("dataqqa") - .withPartitionUpperBound("datatghmtb") - .withPartitionLowerBound("datakcnkghkr") - .withMaxPartitionsNumber("datarshlheayod")); - model = BinaryData.fromObject(model).toObject(SapTableSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java deleted file mode 100644 index 6a497c02edd5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerRecurrenceTests.java +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DayOfWeek; -import com.azure.resourcemanager.datafactory.models.DaysOfWeek; -import com.azure.resourcemanager.datafactory.models.RecurrenceFrequency; -import com.azure.resourcemanager.datafactory.models.RecurrenceSchedule; -import com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence; -import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScheduleTriggerRecurrenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScheduleTriggerRecurrence model = BinaryData.fromString( - "{\"frequency\":\"Hour\",\"interval\":1510832376,\"startTime\":\"2021-02-06T08:40:18Z\",\"endTime\":\"2021-06-26T17:02:55Z\",\"timeZone\":\"iuxdadcdrysanif\",\"schedule\":{\"minutes\":[1123798534,628663248,2143606471,1855573175],\"hours\":[1925731417,1127241689,2113680582],\"weekDays\":[\"Thursday\",\"Sunday\",\"Wednesday\"],\"monthDays\":[925862689,2036699891,520853376],\"monthlyOccurrences\":[{\"day\":\"Wednesday\",\"occurrence\":848484064,\"\":{\"azuned\":\"dataewjgjdq\"}},{\"day\":\"Monday\",\"occurrence\":1593708838,\"\":{\"wxcvwhutjjqzqi\":\"dataiti\",\"mihifrkyvut\":\"datasvoobjsrisfccfzu\",\"cricjmvsp\":\"datamc\",\"kzcizuegyl\":\"datadtladfc\"}}],\"\":{\"hbwaiswbacgrysjg\":\"dataefpijwrvbu\",\"fdcpeduxy\":\"datauzojupdcmpf\"}},\"\":{\"nkhgg\":\"datapfdhfp\"}}") - .toObject(ScheduleTriggerRecurrence.class); - Assertions.assertEquals(RecurrenceFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1510832376, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-06T08:40:18Z"), model.startTime()); - 
Assertions.assertEquals(OffsetDateTime.parse("2021-06-26T17:02:55Z"), model.endTime()); - Assertions.assertEquals("iuxdadcdrysanif", model.timeZone()); - Assertions.assertEquals(1123798534, model.schedule().minutes().get(0)); - Assertions.assertEquals(1925731417, model.schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.THURSDAY, model.schedule().weekDays().get(0)); - Assertions.assertEquals(925862689, model.schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.WEDNESDAY, model.schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(848484064, model.schedule().monthlyOccurrences().get(0).occurrence()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScheduleTriggerRecurrence model = new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.HOUR) - .withInterval(1510832376) - .withStartTime(OffsetDateTime.parse("2021-02-06T08:40:18Z")) - .withEndTime(OffsetDateTime.parse("2021-06-26T17:02:55Z")) - .withTimeZone("iuxdadcdrysanif") - .withSchedule( - new RecurrenceSchedule().withMinutes(Arrays.asList(1123798534, 628663248, 2143606471, 1855573175)) - .withHours(Arrays.asList(1925731417, 1127241689, 2113680582)) - .withWeekDays(Arrays.asList(DaysOfWeek.THURSDAY, DaysOfWeek.SUNDAY, DaysOfWeek.WEDNESDAY)) - .withMonthDays(Arrays.asList(925862689, 2036699891, 520853376)) - .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) - .withOccurrence(848484064) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.MONDAY) - .withOccurrence(1593708838) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf())) - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(ScheduleTriggerRecurrence.class); - Assertions.assertEquals(RecurrenceFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1510832376, model.interval()); - 
Assertions.assertEquals(OffsetDateTime.parse("2021-02-06T08:40:18Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-06-26T17:02:55Z"), model.endTime()); - Assertions.assertEquals("iuxdadcdrysanif", model.timeZone()); - Assertions.assertEquals(1123798534, model.schedule().minutes().get(0)); - Assertions.assertEquals(1925731417, model.schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.THURSDAY, model.schedule().weekDays().get(0)); - Assertions.assertEquals(925862689, model.schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.WEDNESDAY, model.schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(848484064, model.schedule().monthlyOccurrences().get(0).occurrence()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java deleted file mode 100644 index e833bbaf5cbd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTests.java +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DayOfWeek; -import com.azure.resourcemanager.datafactory.models.DaysOfWeek; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.RecurrenceFrequency; -import com.azure.resourcemanager.datafactory.models.RecurrenceSchedule; -import com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence; -import com.azure.resourcemanager.datafactory.models.ScheduleTrigger; -import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScheduleTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScheduleTrigger model = BinaryData.fromString( - 
"{\"type\":\"ScheduleTrigger\",\"typeProperties\":{\"recurrence\":{\"frequency\":\"Day\",\"interval\":908629847,\"startTime\":\"2021-10-10T09:03:58Z\",\"endTime\":\"2021-04-07T08:57:02Z\",\"timeZone\":\"rwzepnlwuhtfa\",\"schedule\":{\"minutes\":[716679257,672428534],\"hours\":[1803788733,2048345821,178936990],\"weekDays\":[\"Monday\",\"Tuesday\",\"Wednesday\",\"Sunday\"],\"monthDays\":[1415296042,968780656,1960354924],\"monthlyOccurrences\":[{\"day\":\"Thursday\",\"occurrence\":1355959866,\"\":{\"nzskokaqbjncnbn\":\"datauipixpztfdujuoii\",\"pnobcqnym\":\"datappuzbipfazsayrkd\",\"xcno\":\"dataswrtifxbhuzv\"}},{\"day\":\"Friday\",\"occurrence\":1783575266,\"\":{\"zyfjmorehpjaktsz\":\"dataaixxigesbeivpuu\",\"nxrwgddg\":\"datacirrphtjljfmhg\",\"rizqein\":\"dataqfflswqeht\"}},{\"day\":\"Wednesday\",\"occurrence\":233335385,\"\":{\"xosszqu\":\"datayk\"}}],\"\":{\"wpgwpulrtjweuoro\":\"dataklsthjvyk\",\"dzvlitntdidhhac\":\"datarnkxswohshnc\",\"tgiontv\":\"datawdl\",\"oshkqthuijvi\":\"datar\"}},\"\":{\"sfuzqpigirnm\":\"dataswpwbgoetuxos\",\"gmwyfxeui\":\"dataeim\",\"ertgq\":\"datavtkllbfnn\"}}},\"pipelines\":[{\"pipelineReference\":{\"referenceName\":\"hvyr\",\"name\":\"euvu\"},\"parameters\":{\"wunwactjpg\":\"dataldkjayiexpcxylqu\",\"umkzdtjngkfi\":\"dataseulfzxghaylrvp\",\"lmbpjna\":\"dataxolpu\",\"enth\":\"datasbxvouxc\"}},{\"pipelineReference\":{\"referenceName\":\"wrme\",\"name\":\"ckocex\"},\"parameters\":{\"zwsjqrmxp\":\"datafniryhlarohwqxj\",\"bamnkgmosayfyvod\":\"dataz\",\"gwsrr\":\"datatpczzqusf\"}},{\"pipelineReference\":{\"referenceName\":\"ijolys\",\"name\":\"yswyaejffvfkk\"},\"parameters\":{\"wcuhqfxferfza\":\"datasjecccfyc\"}},{\"pipelineReference\":{\"referenceName\":\"ermnyphcoobs\",\"name\":\"obzrnvubszjytt\"},\"parameters\":{\"blalmgezkbh\":\"datascmtccz\",\"omeczd\":\"datarft\"}}],\"description\":\"bgp\",\"runtimeState\":\"Stopped\",\"annotations\":[\"databefavbsbhdtiaqa\",\"dataalbkemodlvdhvdvd\",\"datarrkvxmeihrzi\"],\"\":{\"dsu\":\"datanpojmgkeoqrx\",\
"kbvaxehiegk\":\"dataonjuwgvse\",\"i\":\"dataukvalcvlbqht\"}}") - .toObject(ScheduleTrigger.class); - Assertions.assertEquals("bgp", model.description()); - Assertions.assertEquals("hvyr", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("euvu", model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals(RecurrenceFrequency.DAY, model.recurrence().frequency()); - Assertions.assertEquals(908629847, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-10T09:03:58Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-07T08:57:02Z"), model.recurrence().endTime()); - Assertions.assertEquals("rwzepnlwuhtfa", model.recurrence().timeZone()); - Assertions.assertEquals(716679257, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1803788733, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.MONDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(1415296042, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1355959866, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScheduleTrigger model = new ScheduleTrigger().withDescription("bgp") - .withAnnotations(Arrays.asList("databefavbsbhdtiaqa", "dataalbkemodlvdhvdvd", "datarrkvxmeihrzi")) - .withPipelines(Arrays.asList( - new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("hvyr").withName("euvu")) - .withParameters(mapOf("wunwactjpg", "dataldkjayiexpcxylqu", "umkzdtjngkfi", "dataseulfzxghaylrvp", - "lmbpjna", "dataxolpu", "enth", "datasbxvouxc")), - new TriggerPipelineReference() - .withPipelineReference(new 
PipelineReference().withReferenceName("wrme").withName("ckocex")) - .withParameters(mapOf("zwsjqrmxp", "datafniryhlarohwqxj", "bamnkgmosayfyvod", "dataz", "gwsrr", - "datatpczzqusf")), - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("ijolys").withName("yswyaejffvfkk")) - .withParameters(mapOf("wcuhqfxferfza", "datasjecccfyc")), - new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("ermnyphcoobs").withName("obzrnvubszjytt")) - .withParameters(mapOf("blalmgezkbh", "datascmtccz", "omeczd", "datarft")))) - .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.DAY) - .withInterval(908629847) - .withStartTime(OffsetDateTime.parse("2021-10-10T09:03:58Z")) - .withEndTime(OffsetDateTime.parse("2021-04-07T08:57:02Z")) - .withTimeZone("rwzepnlwuhtfa") - .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(716679257, 672428534)) - .withHours(Arrays.asList(1803788733, 2048345821, 178936990)) - .withWeekDays( - Arrays.asList(DaysOfWeek.MONDAY, DaysOfWeek.TUESDAY, DaysOfWeek.WEDNESDAY, DaysOfWeek.SUNDAY)) - .withMonthDays(Arrays.asList(1415296042, 968780656, 1960354924)) - .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY) - .withOccurrence(1355959866) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY) - .withOccurrence(1783575266) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.WEDNESDAY) - .withOccurrence(233335385) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf())) - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(ScheduleTrigger.class); - Assertions.assertEquals("bgp", model.description()); - Assertions.assertEquals("hvyr", model.pipelines().get(0).pipelineReference().referenceName()); - Assertions.assertEquals("euvu", 
model.pipelines().get(0).pipelineReference().name()); - Assertions.assertEquals(RecurrenceFrequency.DAY, model.recurrence().frequency()); - Assertions.assertEquals(908629847, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-10T09:03:58Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-04-07T08:57:02Z"), model.recurrence().endTime()); - Assertions.assertEquals("rwzepnlwuhtfa", model.recurrence().timeZone()); - Assertions.assertEquals(716679257, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1803788733, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.MONDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(1415296042, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.THURSDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1355959866, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java deleted file mode 100644 index 1f0737617fcc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScheduleTriggerTypePropertiesTests.java +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ScheduleTriggerTypeProperties; -import com.azure.resourcemanager.datafactory.models.DayOfWeek; -import com.azure.resourcemanager.datafactory.models.DaysOfWeek; -import com.azure.resourcemanager.datafactory.models.RecurrenceFrequency; -import com.azure.resourcemanager.datafactory.models.RecurrenceSchedule; -import com.azure.resourcemanager.datafactory.models.RecurrenceScheduleOccurrence; -import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScheduleTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScheduleTriggerTypeProperties model = BinaryData.fromString( - 
"{\"recurrence\":{\"frequency\":\"Month\",\"interval\":344035972,\"startTime\":\"2021-10-09T02:30:24Z\",\"endTime\":\"2021-01-05T14:10:51Z\",\"timeZone\":\"ekmwzsmyak\",\"schedule\":{\"minutes\":[1052879269,348269140],\"hours\":[1766816008,2054418512,2101542608,699538918],\"weekDays\":[\"Monday\",\"Saturday\"],\"monthDays\":[1038176636,390845598,1063219120,1945461682],\"monthlyOccurrences\":[{\"day\":\"Sunday\",\"occurrence\":1187472245,\"\":{\"uwttrvgzjfptprfq\":\"dataoghkhzwfns\"}},{\"day\":\"Friday\",\"occurrence\":2051440417,\"\":{\"qtkxiyj\":\"datadzvtfkd\",\"dhpi\":\"databezvxe\",\"gwbkfcajtxzduqth\":\"datakrqkylmfydiod\"}},{\"day\":\"Thursday\",\"occurrence\":1550901884,\"\":{\"fvjfaqah\":\"dataaexewftqo\",\"bdajc\":\"dataeskdsbp\",\"jryppvdhklcczg\":\"datarlnxjucoj\"}}],\"\":{\"rbuvwug\":\"datagzstcfwbfta\",\"umejpqxuiodwblau\":\"datawrclxhvesoodxmmt\"}},\"\":{\"mtzlcvokvo\":\"dataxydvceuywy\",\"znnlsqymvihhgp\":\"datajbjdyoccnlvyhiet\"}}}") - .toObject(ScheduleTriggerTypeProperties.class); - Assertions.assertEquals(RecurrenceFrequency.MONTH, model.recurrence().frequency()); - Assertions.assertEquals(344035972, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-09T02:30:24Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-05T14:10:51Z"), model.recurrence().endTime()); - Assertions.assertEquals("ekmwzsmyak", model.recurrence().timeZone()); - Assertions.assertEquals(1052879269, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1766816008, model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.MONDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(1038176636, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.SUNDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1187472245, 
model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScheduleTriggerTypeProperties model = new ScheduleTriggerTypeProperties() - .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MONTH) - .withInterval(344035972) - .withStartTime(OffsetDateTime.parse("2021-10-09T02:30:24Z")) - .withEndTime(OffsetDateTime.parse("2021-01-05T14:10:51Z")) - .withTimeZone("ekmwzsmyak") - .withSchedule(new RecurrenceSchedule().withMinutes(Arrays.asList(1052879269, 348269140)) - .withHours(Arrays.asList(1766816008, 2054418512, 2101542608, 699538918)) - .withWeekDays(Arrays.asList(DaysOfWeek.MONDAY, DaysOfWeek.SATURDAY)) - .withMonthDays(Arrays.asList(1038176636, 390845598, 1063219120, 1945461682)) - .withMonthlyOccurrences(Arrays.asList( - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.SUNDAY) - .withOccurrence(1187472245) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.FRIDAY) - .withOccurrence(2051440417) - .withAdditionalProperties(mapOf()), - new RecurrenceScheduleOccurrence().withDay(DayOfWeek.THURSDAY) - .withOccurrence(1550901884) - .withAdditionalProperties(mapOf()))) - .withAdditionalProperties(mapOf())) - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(ScheduleTriggerTypeProperties.class); - Assertions.assertEquals(RecurrenceFrequency.MONTH, model.recurrence().frequency()); - Assertions.assertEquals(344035972, model.recurrence().interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-09T02:30:24Z"), model.recurrence().startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-05T14:10:51Z"), model.recurrence().endTime()); - Assertions.assertEquals("ekmwzsmyak", model.recurrence().timeZone()); - Assertions.assertEquals(1052879269, model.recurrence().schedule().minutes().get(0)); - Assertions.assertEquals(1766816008, 
model.recurrence().schedule().hours().get(0)); - Assertions.assertEquals(DaysOfWeek.MONDAY, model.recurrence().schedule().weekDays().get(0)); - Assertions.assertEquals(1038176636, model.recurrence().schedule().monthDays().get(0)); - Assertions.assertEquals(DayOfWeek.SUNDAY, model.recurrence().schedule().monthlyOccurrences().get(0).day()); - Assertions.assertEquals(1187472245, model.recurrence().schedule().monthlyOccurrences().get(0).occurrence()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java deleted file mode 100644 index 155962430f39..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActionTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ScriptAction; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActionTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptAction model = BinaryData - .fromString("{\"name\":\"b\",\"uri\":\"htvughu\",\"roles\":\"dataiql\",\"parameters\":\"nfyfy\"}") - .toObject(ScriptAction.class); - Assertions.assertEquals("b", model.name()); - Assertions.assertEquals("htvughu", model.uri()); - Assertions.assertEquals("nfyfy", model.parameters()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScriptAction model - = new ScriptAction().withName("b").withUri("htvughu").withRoles("dataiql").withParameters("nfyfy"); - model = BinaryData.fromObject(model).toObject(ScriptAction.class); - Assertions.assertEquals("b", model.name()); - Assertions.assertEquals("htvughu", model.uri()); - Assertions.assertEquals("nfyfy", model.parameters()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java deleted file mode 100644 index 7f1d303ab781..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityParameterTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameter; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActivityParameterTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptActivityParameter model = BinaryData.fromString( - "{\"name\":\"dataobw\",\"type\":\"Single\",\"value\":\"datalorinwtvsb\",\"direction\":\"Output\",\"size\":1070675833}") - .toObject(ScriptActivityParameter.class); - Assertions.assertEquals(ScriptActivityParameterType.SINGLE, model.type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.direction()); - Assertions.assertEquals(1070675833, model.size()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScriptActivityParameter model = new ScriptActivityParameter().withName("dataobw") - .withType(ScriptActivityParameterType.SINGLE) - .withValue("datalorinwtvsb") - .withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(1070675833); - model = BinaryData.fromObject(model).toObject(ScriptActivityParameter.class); - Assertions.assertEquals(ScriptActivityParameterType.SINGLE, model.type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, model.direction()); - Assertions.assertEquals(1070675833, model.size()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java deleted file mode 100644 index 477d6aef9a67..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityScriptBlockTests.java +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameter; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; -import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActivityScriptBlockTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptActivityScriptBlock model = BinaryData.fromString( - "{\"text\":\"datayvpo\",\"type\":\"datacxnrwazioyha\",\"parameters\":[{\"name\":\"datalnkwquwo\",\"type\":\"Int32\",\"value\":\"datacweeakgtrwosps\",\"direction\":\"InputOutput\",\"size\":1372371482},{\"name\":\"datavihuifihp\",\"type\":\"Single\",\"value\":\"datawjsqdchbuviifuy\",\"direction\":\"Input\",\"size\":1103955430},{\"name\":\"datajcaqeorv\",\"type\":\"Decimal\",\"value\":\"datarffydetmehdze\",\"direction\":\"Output\",\"size\":1813263959}]}") - .toObject(ScriptActivityScriptBlock.class); - Assertions.assertEquals(ScriptActivityParameterType.INT32, model.parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.parameters().get(0).direction()); - Assertions.assertEquals(1372371482, model.parameters().get(0).size()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScriptActivityScriptBlock model = new ScriptActivityScriptBlock().withText("datayvpo") - 
.withType("datacxnrwazioyha") - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datalnkwquwo") - .withType(ScriptActivityParameterType.INT32) - .withValue("datacweeakgtrwosps") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(1372371482), - new ScriptActivityParameter().withName("datavihuifihp") - .withType(ScriptActivityParameterType.SINGLE) - .withValue("datawjsqdchbuviifuy") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(1103955430), - new ScriptActivityParameter().withName("datajcaqeorv") - .withType(ScriptActivityParameterType.DECIMAL) - .withValue("datarffydetmehdze") - .withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(1813263959))); - model = BinaryData.fromObject(model).toObject(ScriptActivityScriptBlock.class); - Assertions.assertEquals(ScriptActivityParameterType.INT32, model.parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT_OUTPUT, model.parameters().get(0).direction()); - Assertions.assertEquals(1372371482, model.parameters().get(0).size()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java deleted file mode 100644 index 800a385597f1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTests.java +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.ScriptActivity; -import com.azure.resourcemanager.datafactory.models.ScriptActivityLogDestination; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameter; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; -import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; -import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptActivity model = BinaryData.fromString( - 
"{\"type\":\"Script\",\"typeProperties\":{\"scriptBlockExecutionTimeout\":\"dataschttl\",\"scripts\":[{\"text\":\"datawrnuklshrqr\",\"type\":\"datatchttbdxj\",\"parameters\":[{\"name\":\"datadrfxqudyadxnrtk\",\"type\":\"Int32\",\"value\":\"dataun\",\"direction\":\"Output\",\"size\":939068770},{\"name\":\"datawjmbgbgvyz\",\"type\":\"String\",\"value\":\"datazwu\",\"direction\":\"InputOutput\",\"size\":1285629719},{\"name\":\"datacrsr\",\"type\":\"Boolean\",\"value\":\"datatxeu\",\"direction\":\"InputOutput\",\"size\":1313358170},{\"name\":\"dataisx\",\"type\":\"Int64\",\"value\":\"datafmrzxz\",\"direction\":\"Input\",\"size\":348128957}]},{\"text\":\"datamadyick\",\"type\":\"datahrdnlabo\",\"parameters\":[{\"name\":\"datat\",\"type\":\"String\",\"value\":\"datakikbuaqdopxbnr\",\"direction\":\"Input\",\"size\":609263343},{\"name\":\"dataoiiyp\",\"type\":\"Decimal\",\"value\":\"datauywxygztlq\",\"direction\":\"InputOutput\",\"size\":870780592},{\"name\":\"datanqsjkt\",\"type\":\"Int64\",\"value\":\"dataagoqfmk\",\"direction\":\"InputOutput\",\"size\":2002556592}]},{\"text\":\"datagdlskwfiwvdq\",\"type\":\"dataqqrzeoto\",\"parameters\":[{\"name\":\"datatm\",\"type\":\"Double\",\"value\":\"dataroszxiwmw\",\"direction\":\"InputOutput\",\"size\":1990743836},{\"name\":\"datahhbkynfxxl\",\"type\":\"Int16\",\"value\":\"dataskhdvqg\",\"direction\":\"Output\",\"size\":465700229}]}],\"logSettings\":{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"syhpzrosuawdlsm\",\"parameters\":{\"pzfjbzosyttur\":\"datanksovvbt\",\"l\":\"datahkpdkwvwxrxmu\",\"ygwwxentudpvsnll\":\"datasagp\",\"pmmtlwrwsgyqwfp\":\"datajbb\"}},\"path\":\"datafkyttxgtcov\"}}},\"linkedServiceName\":{\"referenceName\":\"cp\",\"parameters\":{\"jg\":\"dataqdgpnpdmk\",\"wxfwcqc\":\"dataascxmnbenanhzx\",\"kfwokzizlaha\":\"datayju\"}},\"policy\":{\"timeout\":\"dataghoez\",\"retry\":\"datakiuzxphhwnvtuh\",\"retryIntervalInSeconds\":970772161,\"secureInput\":true,\"s
ecureOutput\":true,\"\":{\"dawst\":\"databmddgqbkng\",\"fkrcshbdvqoivb\":\"dataten\",\"n\":\"dataenggxesxx\"}},\"name\":\"k\",\"description\":\"wkriv\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ykriwpxcpypyf\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"muxagcoygznmr\":\"dataqcte\",\"gpgobaqnhlktdzfm\":\"datacnqexl\",\"npidvcoghptjvs\":\"dataqj\"}},{\"activity\":\"frirxlvu\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"orkcplpuddn\":\"datazgrbub\",\"lzobhhqefzamgic\":\"datadxxzoywk\"}}],\"userProperties\":[{\"name\":\"xunzqpfgrqmq\",\"value\":\"datazhrb\"},{\"name\":\"thceoujfa\",\"value\":\"datatjxggspydmul\"},{\"name\":\"lzrhc\",\"value\":\"datavxy\"},{\"name\":\"loxfnzjpg\",\"value\":\"dataeggtyifmfi\"}],\"\":{\"unooouq\":\"datadbayxdr\"}}") - .toObject(ScriptActivity.class); - Assertions.assertEquals("k", model.name()); - Assertions.assertEquals("wkriv", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ykriwpxcpypyf", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("xunzqpfgrqmq", model.userProperties().get(0).name()); - Assertions.assertEquals("cp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(970772161, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(ScriptActivityParameterType.INT32, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, - model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(939068770, model.scripts().get(0).parameters().get(0).size()); - 
Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("syhpzrosuawdlsm", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScriptActivity model - = new ScriptActivity().withName("k") - .withDescription("wkriv") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("ykriwpxcpypyf") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("frirxlvu") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("xunzqpfgrqmq").withValue("datazhrb"), - new UserProperty().withName("thceoujfa").withValue("datatjxggspydmul"), - new UserProperty().withName("lzrhc").withValue("datavxy"), - new UserProperty().withName("loxfnzjpg").withValue("dataeggtyifmfi"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cp") - .withParameters( - mapOf("jg", "dataqdgpnpdmk", "wxfwcqc", "dataascxmnbenanhzx", "kfwokzizlaha", "datayju"))) - .withPolicy(new ActivityPolicy().withTimeout("dataghoez") - .withRetry("datakiuzxphhwnvtuh") - .withRetryIntervalInSeconds(970772161) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withScriptBlockExecutionTimeout("dataschttl") - .withScripts( - Arrays - .asList(new ScriptActivityScriptBlock().withText("datawrnuklshrqr") - .withType("datatchttbdxj") - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datadrfxqudyadxnrtk") - .withType(ScriptActivityParameterType.INT32) - .withValue("dataun") - 
.withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(939068770), - new ScriptActivityParameter().withName("datawjmbgbgvyz") - .withType(ScriptActivityParameterType.STRING) - .withValue("datazwu") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(1285629719), - new ScriptActivityParameter().withName("datacrsr") - .withType(ScriptActivityParameterType.BOOLEAN) - .withValue("datatxeu") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(1313358170), - new ScriptActivityParameter().withName("dataisx") - .withType(ScriptActivityParameterType.INT64) - .withValue("datafmrzxz") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(348128957))), - new ScriptActivityScriptBlock().withText("datamadyick") - .withType("datahrdnlabo") - .withParameters( - Arrays - .asList( - new ScriptActivityParameter().withName("datat") - .withType(ScriptActivityParameterType.STRING) - .withValue("datakikbuaqdopxbnr") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(609263343), - new ScriptActivityParameter().withName("dataoiiyp") - .withType(ScriptActivityParameterType.DECIMAL) - .withValue("datauywxygztlq") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(870780592), - new ScriptActivityParameter().withName("datanqsjkt") - .withType(ScriptActivityParameterType.INT64) - .withValue("dataagoqfmk") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(2002556592))), - new ScriptActivityScriptBlock().withText("datagdlskwfiwvdq") - .withType("dataqqrzeoto") - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datatm") - .withType(ScriptActivityParameterType.DOUBLE) - .withValue("dataroszxiwmw") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(1990743836), - new ScriptActivityParameter().withName("datahhbkynfxxl") - .withType(ScriptActivityParameterType.INT16) - .withValue("dataskhdvqg") - 
.withDirection(ScriptActivityParameterDirection.OUTPUT) - .withSize(465700229))))) - .withLogSettings(new ScriptActivityTypePropertiesLogSettings() - .withLogDestination(ScriptActivityLogDestination.ACTIVITY_OUTPUT) - .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("syhpzrosuawdlsm") - .withParameters(mapOf("pzfjbzosyttur", "datanksovvbt", "l", "datahkpdkwvwxrxmu", - "ygwwxentudpvsnll", "datasagp", "pmmtlwrwsgyqwfp", "datajbb"))) - .withPath("datafkyttxgtcov"))); - model = BinaryData.fromObject(model).toObject(ScriptActivity.class); - Assertions.assertEquals("k", model.name()); - Assertions.assertEquals("wkriv", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("ykriwpxcpypyf", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("xunzqpfgrqmq", model.userProperties().get(0).name()); - Assertions.assertEquals("cp", model.linkedServiceName().referenceName()); - Assertions.assertEquals(970772161, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(ScriptActivityParameterType.INT32, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.OUTPUT, - model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(939068770, model.scripts().get(0).parameters().get(0).size()); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("syhpzrosuawdlsm", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - 
- // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java deleted file mode 100644 index 95e9af43a3dc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesLogSettingsTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.ScriptActivityLogDestination; -import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActivityTypePropertiesLogSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptActivityTypePropertiesLogSettings model = BinaryData.fromString( - "{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"imwfeoutztl\",\"parameters\":{\"cyiuiwkrwpishc\":\"dataymtddkyyrpbnqijl\",\"piicnwt\":\"dataxcepn\"}},\"path\":\"datayyskujnzxhotyh\"}}") - .toObject(ScriptActivityTypePropertiesLogSettings.class); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logDestination()); - Assertions.assertEquals("imwfeoutztl", model.logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ScriptActivityTypePropertiesLogSettings model = new ScriptActivityTypePropertiesLogSettings() - .withLogDestination(ScriptActivityLogDestination.ACTIVITY_OUTPUT) - .withLogLocationSettings(new LogLocationSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("imwfeoutztl") - .withParameters(mapOf("cyiuiwkrwpishc", "dataymtddkyyrpbnqijl", "piicnwt", "dataxcepn"))) - .withPath("datayyskujnzxhotyh")); - model = BinaryData.fromObject(model).toObject(ScriptActivityTypePropertiesLogSettings.class); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logDestination()); - 
Assertions.assertEquals("imwfeoutztl", model.logLocationSettings().linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java deleted file mode 100644 index 9f4b201aa1e7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ScriptActivityTypePropertiesTests.java +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ScriptActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.LogLocationSettings; -import com.azure.resourcemanager.datafactory.models.ScriptActivityLogDestination; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameter; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterDirection; -import com.azure.resourcemanager.datafactory.models.ScriptActivityParameterType; -import com.azure.resourcemanager.datafactory.models.ScriptActivityScriptBlock; -import com.azure.resourcemanager.datafactory.models.ScriptActivityTypePropertiesLogSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ScriptActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ScriptActivityTypeProperties model = BinaryData.fromString( - 
"{\"scriptBlockExecutionTimeout\":\"datavrbhqxew\",\"scripts\":[{\"text\":\"datanqh\",\"type\":\"datanehpuhlj\",\"parameters\":[{\"name\":\"datavtptpjabsz\",\"type\":\"String\",\"value\":\"datagdwgqkoxbghpg\",\"direction\":\"Input\",\"size\":791400434},{\"name\":\"dataawo\",\"type\":\"String\",\"value\":\"datarmwr\",\"direction\":\"Input\",\"size\":248460134},{\"name\":\"datamxsnst\",\"type\":\"Single\",\"value\":\"datagraimunmgt\",\"direction\":\"Input\",\"size\":1552583257}]},{\"text\":\"datajnqmjmpx\",\"type\":\"datahyxiez\",\"parameters\":[{\"name\":\"dataaxgw\",\"type\":\"Decimal\",\"value\":\"datalsasyl\",\"direction\":\"Input\",\"size\":1856843732},{\"name\":\"dataybpwzg\",\"type\":\"DateTime\",\"value\":\"datakmut\",\"direction\":\"InputOutput\",\"size\":522707980},{\"name\":\"dataxb\",\"type\":\"Guid\",\"value\":\"datafscbga\",\"direction\":\"Input\",\"size\":943446370},{\"name\":\"datal\",\"type\":\"DateTimeOffset\",\"value\":\"datanshlu\",\"direction\":\"Input\",\"size\":1901559633}]}],\"logSettings\":{\"logDestination\":\"ActivityOutput\",\"logLocationSettings\":{\"linkedServiceName\":{\"referenceName\":\"atshibtxgtibm\",\"parameters\":{\"vmclnsahpswspyif\":\"datadpjnwgtxp\",\"hutabhmck\":\"datasakaihwdybjgyxb\"}},\"path\":\"datauadoxlleohvc\"}}}") - .toObject(ScriptActivityTypeProperties.class); - Assertions.assertEquals(ScriptActivityParameterType.STRING, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT, - model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(791400434, model.scripts().get(0).parameters().get(0).size()); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("atshibtxgtibm", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
ScriptActivityTypeProperties model - = new ScriptActivityTypeProperties().withScriptBlockExecutionTimeout("datavrbhqxew") - .withScripts( - Arrays - .asList( - new ScriptActivityScriptBlock().withText("datanqh") - .withType("datanehpuhlj") - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("datavtptpjabsz") - .withType(ScriptActivityParameterType.STRING) - .withValue("datagdwgqkoxbghpg") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(791400434), - new ScriptActivityParameter().withName("dataawo") - .withType(ScriptActivityParameterType.STRING) - .withValue("datarmwr") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(248460134), - new ScriptActivityParameter().withName("datamxsnst") - .withType(ScriptActivityParameterType.SINGLE) - .withValue("datagraimunmgt") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(1552583257))), - new ScriptActivityScriptBlock().withText("datajnqmjmpx") - .withType("datahyxiez") - .withParameters(Arrays.asList( - new ScriptActivityParameter().withName("dataaxgw") - .withType(ScriptActivityParameterType.DECIMAL) - .withValue("datalsasyl") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(1856843732), - new ScriptActivityParameter().withName("dataybpwzg") - .withType(ScriptActivityParameterType.DATE_TIME) - .withValue("datakmut") - .withDirection(ScriptActivityParameterDirection.INPUT_OUTPUT) - .withSize(522707980), - new ScriptActivityParameter().withName("dataxb") - .withType(ScriptActivityParameterType.GUID) - .withValue("datafscbga") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(943446370), - new ScriptActivityParameter().withName("datal") - .withType(ScriptActivityParameterType.DATE_TIME_OFFSET) - .withValue("datanshlu") - .withDirection(ScriptActivityParameterDirection.INPUT) - .withSize(1901559633))))) - .withLogSettings( - new ScriptActivityTypePropertiesLogSettings() - 
.withLogDestination(ScriptActivityLogDestination.ACTIVITY_OUTPUT) - .withLogLocationSettings( - new LogLocationSettings() - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("atshibtxgtibm") - .withParameters(mapOf("vmclnsahpswspyif", "datadpjnwgtxp", "hutabhmck", - "datasakaihwdybjgyxb"))) - .withPath("datauadoxlleohvc"))); - model = BinaryData.fromObject(model).toObject(ScriptActivityTypeProperties.class); - Assertions.assertEquals(ScriptActivityParameterType.STRING, model.scripts().get(0).parameters().get(0).type()); - Assertions.assertEquals(ScriptActivityParameterDirection.INPUT, - model.scripts().get(0).parameters().get(0).direction()); - Assertions.assertEquals(791400434, model.scripts().get(0).parameters().get(0).size()); - Assertions.assertEquals(ScriptActivityLogDestination.ACTIVITY_OUTPUT, model.logSettings().logDestination()); - Assertions.assertEquals("atshibtxgtibm", - model.logSettings().logLocationSettings().linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java deleted file mode 100644 index e1268f0b1eae..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecretBaseTests.java +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SecretBase; - -public final class SecretBaseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SecretBase model = BinaryData.fromString("{\"type\":\"SecretBase\"}").toObject(SecretBase.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SecretBase model = new SecretBase(); - model = BinaryData.fromObject(model).toObject(SecretBase.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java deleted file mode 100644 index f48ba57c2b67..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureInputOutputPolicyTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SecureInputOutputPolicy; -import org.junit.jupiter.api.Assertions; - -public final class SecureInputOutputPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SecureInputOutputPolicy model = BinaryData.fromString("{\"secureInput\":false,\"secureOutput\":false}") - .toObject(SecureInputOutputPolicy.class); - Assertions.assertEquals(false, model.secureInput()); - Assertions.assertEquals(false, model.secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SecureInputOutputPolicy model = new SecureInputOutputPolicy().withSecureInput(false).withSecureOutput(false); - model = BinaryData.fromObject(model).toObject(SecureInputOutputPolicy.class); - Assertions.assertEquals(false, model.secureInput()); - Assertions.assertEquals(false, model.secureOutput()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java deleted file mode 100644 index c578178897f3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SecureStringTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SecureString; -import org.junit.jupiter.api.Assertions; - -public final class SecureStringTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SecureString model = BinaryData.fromString("{\"type\":\"SecureString\",\"value\":\"vlbpktgdstyou\"}") - .toObject(SecureString.class); - Assertions.assertEquals("vlbpktgdstyou", model.value()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SecureString model = new SecureString().withValue("vlbpktgdstyou"); - model = BinaryData.fromObject(model).toObject(SecureString.class); - Assertions.assertEquals("vlbpktgdstyou", model.value()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java deleted file mode 100644 index a704541fc69f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfDependencyTumblingWindowTriggerReferenceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SelfDependencyTumblingWindowTriggerReference; -import org.junit.jupiter.api.Assertions; - -public final class SelfDependencyTumblingWindowTriggerReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SelfDependencyTumblingWindowTriggerReference model = BinaryData.fromString( - "{\"type\":\"SelfDependencyTumblingWindowTriggerReference\",\"offset\":\"siaszqhpel\",\"size\":\"kwc\"}") - .toObject(SelfDependencyTumblingWindowTriggerReference.class); - Assertions.assertEquals("siaszqhpel", model.offset()); - Assertions.assertEquals("kwc", model.size()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SelfDependencyTumblingWindowTriggerReference model - = new SelfDependencyTumblingWindowTriggerReference().withOffset("siaszqhpel").withSize("kwc"); - model = BinaryData.fromObject(model).toObject(SelfDependencyTumblingWindowTriggerReference.class); - Assertions.assertEquals("siaszqhpel", model.offset()); - Assertions.assertEquals("kwc", model.size()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java deleted file mode 100644 index 3131e9319754..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeNodeInnerTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.core.util.serializer.JacksonAdapter; -import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner; -import java.util.HashMap; -import java.util.Map; - -public final class SelfHostedIntegrationRuntimeNodeInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SelfHostedIntegrationRuntimeNodeInner model = BinaryData.fromString( - "{\"nodeName\":\"e\",\"machineName\":\"sgzvahapjyzhpv\",\"hostServiceUri\":\"zcjrvxdjzlmwlx\",\"status\":\"Online\",\"capabilities\":{\"nnprn\":\"hzovawjvzunlut\",\"eilpjzuaejxdu\":\"i\",\"pwo\":\"tskzbbtdzumveek\",\"fpbsjyofdxl\":\"uh\"},\"versionStatus\":\"sd\",\"version\":\"ouwaboekqvkeln\",\"registerTime\":\"2021-02-02T03:25:54Z\",\"lastConnectTime\":\"2020-12-28T23:28:21Z\",\"expiryTime\":\"2021-07-06T07:16:14Z\",\"lastStartTime\":\"2021-03-30T06:11:19Z\",\"lastStopTime\":\"2021-08-23T09:42:03Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-01-31T17:20:46Z\",\"lastEndUpdateTime\":\"2021-07-31T18:32:18Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1744629944,\"maxConcurrentJobs\":923639125,\"\":{\"iidzyexzne\":\"dataawjoyaqcslyjp\"}}") - .toObject(SelfHostedIntegrationRuntimeNodeInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SelfHostedIntegrationRuntimeNodeInner model = new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "e", "lastStartUpdateTime", "2021-01-31T17:20:46Z", - "lastConnectTime", "2020-12-28T23:28:21Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"nnprn\":\"hzovawjvzunlut\",\"eilpjzuaejxdu\":\"i\",\"pwo\":\"tskzbbtdzumveek\",\"fpbsjyofdxl\":\"uh\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", 
"zcjrvxdjzlmwlx", "registerTime", "2021-02-02T03:25:54Z", "maxConcurrentJobs", - 923639125, "lastStopTime", "2021-08-23T09:42:03Z", "version", "ouwaboekqvkeln", "machineName", - "sgzvahapjyzhpv", "versionStatus", "sd", "concurrentJobsLimit", 1744629944, "lastEndUpdateTime", - "2021-07-31T18:32:18Z", "expiryTime", "2021-07-06T07:16:14Z", "lastStartTime", "2021-03-30T06:11:19Z", - "lastUpdateResult", "Fail", "isActiveDispatcher", true, "status", "Online")); - model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeNodeInner.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java deleted file mode 100644 index 4501d60877ae..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTests.java +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.core.util.serializer.JacksonAdapter; -import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner; -import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntime; -import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntimeStatus; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class SelfHostedIntegrationRuntimeStatusTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SelfHostedIntegrationRuntimeStatus model = BinaryData.fromString( - "{\"type\":\"SelfHosted\",\"typeProperties\":{\"createTime\":\"2021-01-24T04:39:44Z\",\"taskQueueId\":\"rzcw\",\"internalChannelEncryption\":\"NotSet\",\"version\":\"xlfzlnzyrgrl\",\"nodes\":[{\"nodeName\":\"aunjovlxq\",\"machineName\":\"mvzpniqwxmrgmnk\",\"hostServiceUri\":\"lhzkrazkioi\",\"status\":\"Offline\",\"capabilities\":{\"hehgvmmnoyz\":\"vzmsvzngheq\",\"ypkfcdfuxi\":\"nbnyplu\",\"fqhxytsqmbwcacwa\":\"zvxotnoilqcdvhy\"},\"versionStatus\":\"akvokyaxxr\",\"version\":\"qlreqbrcmmdts\",\"registerTime\":\"2021-10-15T01:39:35Z\",\"lastConnectTime\":\"2021-11-22T06:44:46Z\",\"expiryTime\":\"2021-05-03T23:50:10Z\",\"lastStartTime\":\"2021-03-22T07:37:53Z\",\"lastStopTime\":\"2021-07-30T01:28:51Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-08-29T02:23:10Z\",\"lastEndUpdateTime\":\"2021-09-08T14:59:48Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1186969393,\"maxConcurrentJobs\":296106608,\"\":{\"zkkagvej\":\"datax\",\"avamzmzfntte\":\"datamnaphrskmpeajzzy\",\"txytja\":\"datar\"}}],\"scheduledUpdateDate\":\"2021-10-19T04:38:35Z\",\"updateDelayOffset\":\"rjlijkkvbfaehjji\",\"localTimeZoneOffset\":\"jqxavqmdmracfsf\",\"capabilities\":{\"ygy\":\"alihhss\",\"rm\":\"hcvlavyrjlnd\",\"jt
pdru\":\"zvti\"},\"serviceUrls\":[\"xoyjyhutwedigiv\"],\"autoUpdate\":\"Off\",\"versionStatus\":\"cxf\",\"links\":[{\"name\":\"tmca\",\"subscriptionId\":\"qpmfhjikqcnbdq\",\"dataFactoryName\":\"ghnme\",\"dataFactoryLocation\":\"vrchmyucgrm\",\"createTime\":\"2021-07-09T14:29:52Z\"},{\"name\":\"d\",\"subscriptionId\":\"plgqqqgrbr\",\"dataFactoryName\":\"vipgtipa\",\"dataFactoryLocation\":\"ylwhf\",\"createTime\":\"2021-04-09T22:18:32Z\"}],\"pushedVersion\":\"ea\",\"latestVersion\":\"ypjixdmobadydw\",\"autoUpdateETA\":\"2021-05-06T11:19:58Z\",\"selfContainedInteractiveAuthoringEnabled\":true},\"dataFactoryName\":\"clsxdqdchnzibix\",\"state\":\"Offline\",\"\":{\"qkwargcbgdgos\":\"datax\",\"wqykmvugflh\":\"datajiqex\",\"nkvthwta\":\"datahoxurhc\"}}") - .toObject(SelfHostedIntegrationRuntimeStatus.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SelfHostedIntegrationRuntimeStatus model = new SelfHostedIntegrationRuntimeStatus() - .withNodes(Arrays.asList(new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf( - "nodeName", "aunjovlxq", "lastStartUpdateTime", "2021-08-29T02:23:10Z", "lastConnectTime", - "2021-11-22T06:44:46Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"hehgvmmnoyz\":\"vzmsvzngheq\",\"ypkfcdfuxi\":\"nbnyplu\",\"fqhxytsqmbwcacwa\":\"zvxotnoilqcdvhy\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "lhzkrazkioi", "registerTime", "2021-10-15T01:39:35Z", "maxConcurrentJobs", 296106608, - "lastStopTime", "2021-07-30T01:28:51Z", "version", "qlreqbrcmmdts", "machineName", "mvzpniqwxmrgmnk", - "versionStatus", "akvokyaxxr", "concurrentJobsLimit", 1186969393, "lastEndUpdateTime", - "2021-09-08T14:59:48Z", "expiryTime", "2021-05-03T23:50:10Z", "lastStartTime", "2021-03-22T07:37:53Z", - "lastUpdateResult", "None", "isActiveDispatcher", true, "status", "Offline")))) - .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new 
LinkedIntegrationRuntime())); - model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeStatus.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java deleted file mode 100644 index a686c050c973..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SelfHostedIntegrationRuntimeStatusTypePropertiesTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.core.util.serializer.JacksonAdapter; -import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeNodeInner; -import com.azure.resourcemanager.datafactory.fluent.models.SelfHostedIntegrationRuntimeStatusTypeProperties; -import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class SelfHostedIntegrationRuntimeStatusTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SelfHostedIntegrationRuntimeStatusTypeProperties model = BinaryData.fromString( - "{\"createTime\":\"2021-07-28T04:18:35Z\",\"taskQueueId\":\"gyvxhfmuhkezuucq\",\"internalChannelEncryption\":\"SslEncrypted\",\"version\":\"dxvbeqzjdwxt\",\"nodes\":[{\"nodeName\":\"wnee\",\"machineName\":\"ytlxugjr\",\"hostServiceUri\":\"nffaofkvfruxzkfb\",\"status\":\"NeedRegistration\",\"capabilities\":{\"stvymdqaymqmyrnz\":\"zo\",\"voyjdgfkrq\":\"ubqkfnox\",\"juguvnxbo\":\"jrvpakxrdeexw\",\"kbeadyfen\":\"pzurnzoy\"},\"versionStatus\":\"zoijoxcb\",\"version\":\"iwse\",\"registerTime\":\"2021-05-31T21:20:24Z\",\"lastConnectTime\":\"2021-10-30T09:06:39Z\",\"expiryTime\":\"2021-06-19T04:36:39Z\",\"lastStartTime\":\"2020-12-23T12:06:27Z\",\"lastStopTime\":\"2020-12-23T08:41:04Z\",\"lastUpdateResult\":\"None\",\"lastStartUpdateTime\":\"2021-05-29T21:21:29Z\",\"lastEndUpdateTime\":\"2021-01-04T14:07:23Z\",\"isActiveDispatcher\":true,\"concurrentJobsLimit\":1173135899,\"maxConcurrentJobs\":129339132,\"\":{\"yajmm\":\"datacwkkgqy\",\"bgrufsdbkuxkdi\":\"datazupd\",\"s\":\"datam\",\"ceylaulpuexyigxz\":\"datavxwkscwbshfihvlm\"}},{\"nodeName\":\"ecxdslspgnndefyh\",\"machineName\":\"yhwl\",\"hostServiceUri\":\"v\",\"status\":\"Upgrading\",\"capabilities\":{\"vzr\":\"pr
qtfk\",\"uhbgftfvqukk\":\"pmonxdwf\",\"rjylw\":\"vzenegpd\"},\"versionStatus\":\"semjhhxlsu\",\"version\":\"hztb\",\"registerTime\":\"2021-09-19T20:36:09Z\",\"lastConnectTime\":\"2021-03-07T04:32:25Z\",\"expiryTime\":\"2021-11-04T03:20:26Z\",\"lastStartTime\":\"2021-09-11T09:38:16Z\",\"lastStopTime\":\"2021-01-18T21:21:35Z\",\"lastUpdateResult\":\"Fail\",\"lastStartUpdateTime\":\"2021-07-12T20:50:44Z\",\"lastEndUpdateTime\":\"2021-02-10T12:07:01Z\",\"isActiveDispatcher\":false,\"concurrentJobsLimit\":792803694,\"maxConcurrentJobs\":2047573598,\"\":{\"mcyk\":\"datakykqfl\",\"xhcbeejn\":\"datamysmkbndnrihpj\"}}],\"scheduledUpdateDate\":\"2021-07-30T19:01:28Z\",\"updateDelayOffset\":\"ydlkrnpsbnm\",\"localTimeZoneOffset\":\"hkipjardvdp\",\"capabilities\":{\"elnpbieclund\":\"dxmi\",\"fskjvayc\":\"vjlpbjszq\",\"pbrzwi\":\"rwknsbgh\"},\"serviceUrls\":[\"cyhkecebtpgvut\",\"susfdyw\",\"rqcowk\"],\"autoUpdate\":\"Off\",\"versionStatus\":\"rcifflxqqn\",\"links\":[{\"name\":\"uyuwgnyj\",\"subscriptionId\":\"ujticwmlf\",\"dataFactoryName\":\"hibfmco\",\"dataFactoryLocation\":\"ktuajkufp\",\"createTime\":\"2021-05-14T23:34:35Z\"},{\"name\":\"nmeiomnobbaibcf\",\"subscriptionId\":\"yqzaisfofg\",\"dataFactoryName\":\"rkmgifmyzbuhdnhh\",\"dataFactoryLocation\":\"ts\",\"createTime\":\"2021-12-08T00:22:03Z\"},{\"name\":\"dpon\",\"subscriptionId\":\"lqivcnuqf\",\"dataFactoryName\":\"jzzbpcwtwtrchkcm\",\"dataFactoryLocation\":\"awqesqsqmiekx\",\"createTime\":\"2021-02-24T05:57:01Z\"},{\"name\":\"chf\",\"subscriptionId\":\"ykkvjjlbajcu\",\"dataFactoryName\":\"yqokbgumuejxxpx\",\"dataFactoryLocation\":\"zch\",\"createTime\":\"2021-09-20T13:29:17Z\"}],\"pushedVersion\":\"ginrk\",\"latestVersion\":\"ngzfsula\",\"autoUpdateETA\":\"2021-01-05T02:41:11Z\",\"selfContainedInteractiveAuthoringEnabled\":false}") - .toObject(SelfHostedIntegrationRuntimeStatusTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
SelfHostedIntegrationRuntimeStatusTypeProperties model = new SelfHostedIntegrationRuntimeStatusTypeProperties() - .withNodes(Arrays.asList(new SelfHostedIntegrationRuntimeNodeInner().withAdditionalProperties(mapOf( - "nodeName", "wnee", "lastStartUpdateTime", "2021-05-29T21:21:29Z", "lastConnectTime", - "2021-10-30T09:06:39Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"stvymdqaymqmyrnz\":\"zo\",\"voyjdgfkrq\":\"ubqkfnox\",\"juguvnxbo\":\"jrvpakxrdeexw\",\"kbeadyfen\":\"pzurnzoy\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "nffaofkvfruxzkfb", "registerTime", "2021-05-31T21:20:24Z", "maxConcurrentJobs", - 129339132, "lastStopTime", "2020-12-23T08:41:04Z", "version", "iwse", "machineName", "ytlxugjr", - "versionStatus", "zoijoxcb", "concurrentJobsLimit", 1173135899, "lastEndUpdateTime", - "2021-01-04T14:07:23Z", "expiryTime", "2021-06-19T04:36:39Z", "lastStartTime", "2020-12-23T12:06:27Z", - "lastUpdateResult", "None", "isActiveDispatcher", true, "status", "NeedRegistration")), - new SelfHostedIntegrationRuntimeNodeInner() - .withAdditionalProperties(mapOf("nodeName", "ecxdslspgnndefyh", "lastStartUpdateTime", - "2021-07-12T20:50:44Z", "lastConnectTime", "2021-03-07T04:32:25Z", "capabilities", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize("{\"vzr\":\"prqtfk\",\"uhbgftfvqukk\":\"pmonxdwf\",\"rjylw\":\"vzenegpd\"}", - Object.class, SerializerEncoding.JSON), - "hostServiceUri", "v", "registerTime", "2021-09-19T20:36:09Z", "maxConcurrentJobs", 2047573598, - "lastStopTime", "2021-01-18T21:21:35Z", "version", "hztb", "machineName", "yhwl", - "versionStatus", "semjhhxlsu", "concurrentJobsLimit", 792803694, "lastEndUpdateTime", - "2021-02-10T12:07:01Z", "expiryTime", "2021-11-04T03:20:26Z", "lastStartTime", - "2021-09-11T09:38:16Z", "lastUpdateResult", "Fail", "isActiveDispatcher", false, "status", - "Upgrading")))) - .withLinks(Arrays.asList(new LinkedIntegrationRuntime(), new 
LinkedIntegrationRuntime(), - new LinkedIntegrationRuntime(), new LinkedIntegrationRuntime())); - model = BinaryData.fromObject(model).toObject(SelfHostedIntegrationRuntimeStatusTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java deleted file mode 100644 index bc0b6b70be84..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowObjectDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.ServiceNowObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ServiceNowObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ServiceNowObjectDataset model = BinaryData.fromString( - "{\"type\":\"ServiceNowObject\",\"typeProperties\":{\"tableName\":\"datannmjun\"},\"description\":\"lxcltjhbcycg\",\"structure\":\"datakcsihxvta\",\"schema\":\"datawf\",\"linkedServiceName\":{\"referenceName\":\"pxpry\",\"parameters\":{\"gugwlux\":\"databubwhzq\",\"mkdhwqcqweba\":\"datahtq\",\"phujeucosvk\":\"datamfpk\",\"llgnueezfpffb\":\"dataeergvypaxpjpy\"}},\"parameters\":{\"gzyojfchicpare\":{\"type\":\"Array\",\"defaultValue\":\"datavmcgm\"},\"ojuxil\":{\"type\":\"Bool\",\"defaultValue\":\"dataksgqhb\"},\"fldfljwt\":{\"type\":\"Object\",\"defaultValue\":\"datalkc\"}},\"annotations\":[\"datatsflotumbm\",\"datagftshfgmuxuqiags\",\"dataoikuqirhsk\",\"datapaowkgvnlfueyxfz\"],\"folder\":{\"name\":\"lrjugcfebpiucenb\"},\"\":{\"lsxr\":\"datalldfknbdzw\"}}") - .toObject(ServiceNowObjectDataset.class); - Assertions.assertEquals("lxcltjhbcycg", model.description()); - Assertions.assertEquals("pxpry", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("gzyojfchicpare").type()); - Assertions.assertEquals("lrjugcfebpiucenb", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
ServiceNowObjectDataset model = new ServiceNowObjectDataset().withDescription("lxcltjhbcycg") - .withStructure("datakcsihxvta") - .withSchema("datawf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pxpry") - .withParameters(mapOf("gugwlux", "databubwhzq", "mkdhwqcqweba", "datahtq", "phujeucosvk", "datamfpk", - "llgnueezfpffb", "dataeergvypaxpjpy"))) - .withParameters(mapOf("gzyojfchicpare", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datavmcgm"), "ojuxil", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataksgqhb"), "fldfljwt", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datalkc"))) - .withAnnotations( - Arrays.asList("datatsflotumbm", "datagftshfgmuxuqiags", "dataoikuqirhsk", "datapaowkgvnlfueyxfz")) - .withFolder(new DatasetFolder().withName("lrjugcfebpiucenb")) - .withTableName("datannmjun"); - model = BinaryData.fromObject(model).toObject(ServiceNowObjectDataset.class); - Assertions.assertEquals("lxcltjhbcycg", model.description()); - Assertions.assertEquals("pxpry", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("gzyojfchicpare").type()); - Assertions.assertEquals("lrjugcfebpiucenb", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java deleted file mode 100644 index 3ffec930fc46..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ServiceNowSource; - -public final class ServiceNowSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ServiceNowSource model = BinaryData.fromString( - "{\"type\":\"ServiceNowSource\",\"query\":\"datajdga\",\"queryTimeout\":\"dataktnsowszbeflhx\",\"additionalColumns\":\"datargokyngarwzutzjx\",\"sourceRetryCount\":\"datassmnatnpo\",\"sourceRetryWait\":\"dataeylqysgm\",\"maxConcurrentConnections\":\"dataxngekcwe\",\"disableMetricsCollection\":\"dataqtkdginmhlgpefqq\",\"\":{\"cqgqrsopq\":\"datayowrwvbqv\",\"pvyktfuhfaabi\":\"dataiqfaxtljpyzcgugs\",\"qlkh\":\"datavslocdkpvv\",\"fmibwzuhy\":\"dataxnzjzashhiz\"}}") - .toObject(ServiceNowSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ServiceNowSource model = new ServiceNowSource().withSourceRetryCount("datassmnatnpo") - .withSourceRetryWait("dataeylqysgm") - .withMaxConcurrentConnections("dataxngekcwe") 
- .withDisableMetricsCollection("dataqtkdginmhlgpefqq") - .withQueryTimeout("dataktnsowszbeflhx") - .withAdditionalColumns("datargokyngarwzutzjx") - .withQuery("datajdga"); - model = BinaryData.fromObject(model).toObject(ServiceNowSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java deleted file mode 100644 index 19630a76c6cb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2ObjectDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.ServiceNowV2ObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ServiceNowV2ObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ServiceNowV2ObjectDataset model = BinaryData.fromString( - 
"{\"type\":\"ServiceNowV2Object\",\"typeProperties\":{\"tableName\":\"datavybfmpotal\"},\"description\":\"figrxxtrco\",\"structure\":\"dataqe\",\"schema\":\"dataldmxxbjh\",\"linkedServiceName\":{\"referenceName\":\"pvamsxrwqlwdf\",\"parameters\":{\"bboffgxtae\":\"datarplzeqzv\",\"fcyatbxdwr\":\"dataxt\",\"fbpeigkflvovriq\":\"datayvtkmxvztshnu\"}},\"parameters\":{\"txur\":{\"type\":\"Float\",\"defaultValue\":\"datakqcgzygtdjhtbar\"}},\"annotations\":[\"datayyumhzpst\",\"datacqacvttyh\",\"databilnszyjbuw\"],\"folder\":{\"name\":\"sydsci\"},\"\":{\"l\":\"dataayioxpqgqs\",\"akqsjymcfv\":\"datalefeombodvdgf\",\"nbpkfnxrlncmlzvv\":\"datazceuyuqktck\",\"cjqzrevfwcba\":\"datamesfhqs\"}}") - .toObject(ServiceNowV2ObjectDataset.class); - Assertions.assertEquals("figrxxtrco", model.description()); - Assertions.assertEquals("pvamsxrwqlwdf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); - Assertions.assertEquals("sydsci", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ServiceNowV2ObjectDataset model = new ServiceNowV2ObjectDataset().withDescription("figrxxtrco") - .withStructure("dataqe") - .withSchema("dataldmxxbjh") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("pvamsxrwqlwdf") - .withParameters(mapOf("bboffgxtae", "datarplzeqzv", "fcyatbxdwr", "dataxt", "fbpeigkflvovriq", - "datayvtkmxvztshnu"))) - .withParameters(mapOf("txur", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datakqcgzygtdjhtbar"))) - .withAnnotations(Arrays.asList("datayyumhzpst", "datacqacvttyh", "databilnszyjbuw")) - .withFolder(new DatasetFolder().withName("sydsci")) - .withTableName("datavybfmpotal"); - model = BinaryData.fromObject(model).toObject(ServiceNowV2ObjectDataset.class); - Assertions.assertEquals("figrxxtrco", model.description()); - Assertions.assertEquals("pvamsxrwqlwdf", 
model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("txur").type()); - Assertions.assertEquals("sydsci", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java deleted file mode 100644 index dafdd2be5141..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ServiceNowV2SourceTests.java +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ExpressionV2; -import com.azure.resourcemanager.datafactory.models.ExpressionV2Type; -import com.azure.resourcemanager.datafactory.models.ServiceNowV2Source; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ServiceNowV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ServiceNowV2Source model = BinaryData.fromString( - "{\"type\":\"ServiceNowV2Source\",\"expression\":{\"type\":\"Binary\",\"value\":\"cuxgimfftvylfke\",\"operators\":[\"fq\",\"sbqdjawul\",\"yjmjvzpldhbapfrr\",\"wrmdmrhsybvn\"],\"operands\":[{\"type\":\"NAry\",\"value\":\"katjyxhvjjvsv\",\"operators\":[\"lysfro\",\"nf\",\"oamg\"],\"operands\":[{\"type\":\"NAry\",\"value\":\"gdsnrkn\",\"operators\":[\"gjukqnxy\",\"oyclrlepashmfbzk\"],\"operands\":[{},{}]},{\"type\":\"Unary\",\"value\":\"vofnsuwsurod\",\"operators\":[\"ngbbxahsqors\",\"ndslrndiu\",\"o\",\"ekdmnvaibhxujgy\"],\"operands\":[{},{},{}]},{\"type\":\"Field\",\"value\":\"zn\",\"operators\":[\"e\"],\"operands\":[{},{},{},{}]},{\"type\":\"Constant\",\"value\":\"hpxzjk\",\"operators\":[\"zpcecisnhtdsk\",\"nigohafud\",\"eowepueq\",\"rcnfhcqgjvlnvfz\"],\"operands\":[{},{},{}]}]},{\"type\":\"Field\",\"value\":\"ugetwgjlxdddvfn\",\"operators\":[\"va\",\"spjdxaytzkdqim\",\"m\",\"ijcullk\"],\"operands\":[{\"type\":\"Field\",\"value\":\"red\",\"operators\":[\"y\"],\"operands\":[{}]},{\"type\":\"Unary\",\"value\":\"y\",\"operators\":[\"vjsqazecdomjrr\",\"lwrvi\",\"aqxstykus\"],\"operands\":[{},{},{}]},{\"type\":\"Field\",\"value\":\"xiqej\",\"operators\":[\"rcnoexwar\",\"azfsrvz\",\"ycevhazwew\"],\"operands\":[{},{}]}]},{\"type\":\"Binary\",\"value\":\"dycspidc\",\"operators\":[\"fg\",\"ynuxvyalk\",\"uozwowwmulqgaeqn\"],\"operands\":[{\"type\":\"NAry\",\"value\":\"jezcwf\",\"operators\":[\"krzu\",\"epdvxmkzgrrg\"],\"op
erands\":[{},{}]},{\"type\":\"NAry\",\"value\":\"ebwdvuvq\",\"operators\":[\"l\",\"doamqkdwagnyah\",\"rxtpuyuradfiwjou\"],\"operands\":[{},{},{}]},{\"type\":\"NAry\",\"value\":\"niv\",\"operators\":[\"fw\",\"zko\",\"djwjmrbphtllkpk\",\"qzbvyrvfxcbatmv\"],\"operands\":[{}]}]},{\"type\":\"Constant\",\"value\":\"iosy\",\"operators\":[\"frbujltgxhgyllas\",\"whbmo\",\"mhknsknnnpyo\",\"yinyqsdsuewfgri\"],\"operands\":[{\"type\":\"Constant\",\"value\":\"rtmvtfeyopg\",\"operators\":[\"ebmcizmgg\",\"sxvgwrq\"],\"operands\":[{},{},{},{}]},{\"type\":\"Field\",\"value\":\"a\",\"operators\":[\"lerkyimcfmdh\",\"tl\"],\"operands\":[{},{},{},{}]},{\"type\":\"NAry\",\"value\":\"cluvjpp\",\"operators\":[\"ldthshcj\",\"oobltoargcntgqy\",\"wmzz\",\"gbgvf\"],\"operands\":[{},{},{}]},{\"type\":\"Unary\",\"value\":\"rtuqwvybx\",\"operators\":[\"hf\"],\"operands\":[{},{},{}]}]}]},\"queryTimeout\":\"datamdaeshjjqc\",\"additionalColumns\":\"datarnfavqefiwwhbkxz\",\"sourceRetryCount\":\"datayovlhm\",\"sourceRetryWait\":\"dataobiagwuefmyiw\",\"maxConcurrentConnections\":\"datatau\",\"disableMetricsCollection\":\"datasyfj\",\"\":{\"a\":\"dataeibcezdtf\",\"zcsff\":\"datalwllgjerql\",\"teyh\":\"dataguny\",\"kvkmfkmchc\":\"dataspk\"}}") - .toObject(ServiceNowV2Source.class); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.expression().type()); - Assertions.assertEquals("cuxgimfftvylfke", model.expression().value()); - Assertions.assertEquals("fq", model.expression().operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.NARY, model.expression().operands().get(0).type()); - Assertions.assertEquals("katjyxhvjjvsv", model.expression().operands().get(0).value()); - Assertions.assertEquals("lysfro", model.expression().operands().get(0).operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.NARY, model.expression().operands().get(0).operands().get(0).type()); - Assertions.assertEquals("gdsnrkn", model.expression().operands().get(0).operands().get(0).value()); - 
Assertions.assertEquals("gjukqnxy", model.expression().operands().get(0).operands().get(0).operators().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ServiceNowV2Source model = new ServiceNowV2Source().withSourceRetryCount("datayovlhm") - .withSourceRetryWait("dataobiagwuefmyiw") - .withMaxConcurrentConnections("datatau") - .withDisableMetricsCollection("datasyfj") - .withQueryTimeout("datamdaeshjjqc") - .withAdditionalColumns("datarnfavqefiwwhbkxz") - .withExpression(new ExpressionV2().withType(ExpressionV2Type.BINARY) - .withValue("cuxgimfftvylfke") - .withOperators(Arrays.asList("fq", "sbqdjawul", "yjmjvzpldhbapfrr", "wrmdmrhsybvn")) - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.NARY) - .withValue("katjyxhvjjvsv") - .withOperators(Arrays.asList("lysfro", "nf", "oamg")) - .withOperands(Arrays.asList(new ExpressionV2() - .withType(ExpressionV2Type.NARY) - .withValue("gdsnrkn") - .withOperators(Arrays.asList("gjukqnxy", "oyclrlepashmfbzk")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY) - .withValue("vofnsuwsurod") - .withOperators(Arrays.asList("ngbbxahsqors", "ndslrndiu", "o", "ekdmnvaibhxujgy")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("zn") - .withOperators(Arrays.asList("e")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.CONSTANT) - .withValue("hpxzjk") - .withOperators( - Arrays.asList("zpcecisnhtdsk", "nigohafud", "eowepueq", "rcnfhcqgjvlnvfz")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("ugetwgjlxdddvfn") - .withOperators(Arrays.asList("va", 
"spjdxaytzkdqim", "m", "ijcullk")) - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("red") - .withOperators(Arrays.asList("y")) - .withOperands(Arrays.asList(new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY) - .withValue("y") - .withOperators(Arrays.asList("vjsqazecdomjrr", "lwrvi", "aqxstykus")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("xiqej") - .withOperators(Arrays.asList("rcnoexwar", "azfsrvz", "ycevhazwew")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.BINARY) - .withValue("dycspidc") - .withOperators(Arrays.asList("fg", "ynuxvyalk", "uozwowwmulqgaeqn")) - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.NARY) - .withValue("jezcwf") - .withOperators(Arrays.asList("krzu", "epdvxmkzgrrg")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.NARY) - .withValue("ebwdvuvq") - .withOperators(Arrays.asList("l", "doamqkdwagnyah", "rxtpuyuradfiwjou")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.NARY) - .withValue("niv") - .withOperators(Arrays.asList("fw", "zko", "djwjmrbphtllkpk", "qzbvyrvfxcbatmv")) - .withOperands(Arrays.asList(new ExpressionV2())))), - new ExpressionV2().withType(ExpressionV2Type.CONSTANT) - .withValue("iosy") - .withOperators(Arrays.asList("frbujltgxhgyllas", "whbmo", "mhknsknnnpyo", "yinyqsdsuewfgri")) - .withOperands(Arrays.asList( - new ExpressionV2().withType(ExpressionV2Type.CONSTANT) - .withValue("rtmvtfeyopg") - .withOperators(Arrays.asList("ebmcizmgg", "sxvgwrq")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new 
ExpressionV2().withType(ExpressionV2Type.FIELD) - .withValue("a") - .withOperators(Arrays.asList("lerkyimcfmdh", "tl")) - .withOperands(Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2(), - new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.NARY) - .withValue("cluvjpp") - .withOperators(Arrays.asList("ldthshcj", "oobltoargcntgqy", "wmzz", "gbgvf")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2())), - new ExpressionV2().withType(ExpressionV2Type.UNARY) - .withValue("rtuqwvybx") - .withOperators(Arrays.asList("hf")) - .withOperands( - Arrays.asList(new ExpressionV2(), new ExpressionV2(), new ExpressionV2()))))))); - model = BinaryData.fromObject(model).toObject(ServiceNowV2Source.class); - Assertions.assertEquals(ExpressionV2Type.BINARY, model.expression().type()); - Assertions.assertEquals("cuxgimfftvylfke", model.expression().value()); - Assertions.assertEquals("fq", model.expression().operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.NARY, model.expression().operands().get(0).type()); - Assertions.assertEquals("katjyxhvjjvsv", model.expression().operands().get(0).value()); - Assertions.assertEquals("lysfro", model.expression().operands().get(0).operators().get(0)); - Assertions.assertEquals(ExpressionV2Type.NARY, model.expression().operands().get(0).operands().get(0).type()); - Assertions.assertEquals("gdsnrkn", model.expression().operands().get(0).operands().get(0).value()); - Assertions.assertEquals("gjukqnxy", model.expression().operands().get(0).operands().get(0).operators().get(0)); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java deleted file mode 100644 index 9ac87c5f25bb..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTests.java +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.SecureInputOutputPolicy; -import com.azure.resourcemanager.datafactory.models.SetVariableActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SetVariableActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SetVariableActivity model = BinaryData.fromString( - "{\"type\":\"SetVariable\",\"typeProperties\":{\"variableName\":\"jnvsjgnb\",\"value\":\"datahqsfh\",\"setSystemVariable\":true},\"policy\":{\"secureInput\":true,\"secureOutput\":true},\"name\":\"xji\",\"description\":\"k\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"jmirbnfcq\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"rymrfpqyxlncwagi\":\"datapfspfd\",\"uerhzyl\":\"dataqhzotkowi\",\"emsl\":\"datawymrmuioepi\",\"vryszqzve\":\"dataz\"}}],\"userProperties\":[{\"name\":\"ewmpwjcgry\",\"value\":\"datal\"},{\"name\":\"qcf\",\"value\":\"datarywdgrsk\"},{\"name\":\"ltcfzyijnxvmcx\",\"value\":\"datajlpyhdxvdj\"}],\"\":{\"qbqgfq\":\"dataewt\",\"xwevdjmxvvtuky\":\"datavm\"}}") - 
.toObject(SetVariableActivity.class); - Assertions.assertEquals("xji", model.name()); - Assertions.assertEquals("k", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("jmirbnfcq", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ewmpwjcgry", model.userProperties().get(0).name()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("jnvsjgnb", model.variableName()); - Assertions.assertEquals(true, model.setSystemVariable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SetVariableActivity model = new SetVariableActivity().withName("xji") - .withDescription("k") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("jmirbnfcq") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("ewmpwjcgry").withValue("datal"), - new UserProperty().withName("qcf").withValue("datarywdgrsk"), - new UserProperty().withName("ltcfzyijnxvmcx").withValue("datajlpyhdxvdj"))) - .withPolicy(new SecureInputOutputPolicy().withSecureInput(true).withSecureOutput(true)) - .withVariableName("jnvsjgnb") - .withValue("datahqsfh") - .withSetSystemVariable(true); - model = BinaryData.fromObject(model).toObject(SetVariableActivity.class); - Assertions.assertEquals("xji", model.name()); - Assertions.assertEquals("k", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("jmirbnfcq", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("ewmpwjcgry", model.userProperties().get(0).name()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals("jnvsjgnb", model.variableName()); - Assertions.assertEquals(true, model.setSystemVariable()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java deleted file mode 100644 index 6599ac800f2a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SetVariableActivityTypePropertiesTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SetVariableActivityTypeProperties; -import org.junit.jupiter.api.Assertions; - -public final class SetVariableActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SetVariableActivityTypeProperties model - = BinaryData.fromString("{\"variableName\":\"bjn\",\"value\":\"dataidinbf\",\"setSystemVariable\":false}") - .toObject(SetVariableActivityTypeProperties.class); - Assertions.assertEquals("bjn", model.variableName()); - Assertions.assertEquals(false, model.setSystemVariable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SetVariableActivityTypeProperties model = new SetVariableActivityTypeProperties().withVariableName("bjn") - .withValue("dataidinbf") - .withSetSystemVariable(false); - model = BinaryData.fromObject(model).toObject(SetVariableActivityTypeProperties.class); - Assertions.assertEquals("bjn", model.variableName()); - Assertions.assertEquals(false, model.setSystemVariable()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java deleted file mode 100644 index efd426998cb6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpLocationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SftpLocation; - -public final class SftpLocationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SftpLocation model = BinaryData.fromString( - "{\"type\":\"SftpLocation\",\"folderPath\":\"datalisolntfxxc\",\"fileName\":\"datamipfjw\",\"\":{\"nvgskjtoxjd\":\"datagizmshxxbaizabu\",\"xqqm\":\"datajsjznv\",\"aydhf\":\"datai\"}}") - .toObject(SftpLocation.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SftpLocation model = new SftpLocation().withFolderPath("datalisolntfxxc").withFileName("datamipfjw"); - model = BinaryData.fromObject(model).toObject(SftpLocation.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java deleted file mode 100644 index b77b96400d16..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpReadSettingsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SftpReadSettings; - -public final class SftpReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SftpReadSettings model = BinaryData.fromString( - "{\"type\":\"SftpReadSettings\",\"recursive\":\"datagl\",\"wildcardFolderPath\":\"datapewt\",\"wildcardFileName\":\"dataswul\",\"enablePartitionDiscovery\":\"dataffczwz\",\"partitionRootPath\":\"datavvb\",\"fileListPath\":\"datanpriyttiqdcjg\",\"deleteFilesAfterCompletion\":\"datacwmq\",\"modifiedDatetimeStart\":\"datawoetjrfruc\",\"modifiedDatetimeEnd\":\"datafwdxbpvbsibz\",\"disableChunking\":\"datadeyo\",\"maxConcurrentConnections\":\"datapothtpaqmfwiesh\",\"disableMetricsCollection\":\"datae\",\"\":{\"mgud\":\"dataym\",\"ecuve\":\"datay\",\"kkjvrrvj\":\"datalcwdg\"}}") - .toObject(SftpReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SftpReadSettings model = new SftpReadSettings().withMaxConcurrentConnections("datapothtpaqmfwiesh") - .withDisableMetricsCollection("datae") - .withRecursive("datagl") - .withWildcardFolderPath("datapewt") - .withWildcardFileName("dataswul") - .withEnablePartitionDiscovery("dataffczwz") - .withPartitionRootPath("datavvb") - .withFileListPath("datanpriyttiqdcjg") - .withDeleteFilesAfterCompletion("datacwmq") - .withModifiedDatetimeStart("datawoetjrfruc") - .withModifiedDatetimeEnd("datafwdxbpvbsibz") - .withDisableChunking("datadeyo"); - model = BinaryData.fromObject(model).toObject(SftpReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java deleted file mode 100644 
index 72fd89c3caf5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SftpWriteSettingsTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.SftpWriteSettings; -import java.util.Arrays; - -public final class SftpWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SftpWriteSettings model = BinaryData.fromString( - "{\"type\":\"SftpWriteSettings\",\"operationTimeout\":\"dataypkiqlo\",\"useTempFileRename\":\"datazbceimsco\",\"maxConcurrentConnections\":\"datanaehllwqmraihe\",\"disableMetricsCollection\":\"datajvzlgclia\",\"copyBehavior\":\"datawxvihyi\",\"metadata\":[{\"name\":\"dataxex\",\"value\":\"datano\"},{\"name\":\"dataymgkirfzvtzrq\",\"value\":\"datalipmuufsek\"},{\"name\":\"datapufhwpkpejfszw\",\"value\":\"datamswb\"},{\"name\":\"datamfwjcn\",\"value\":\"dataajptjhwrn\"}],\"\":{\"pafj\":\"dataka\",\"qja\":\"datadc\",\"dykjchzcz\":\"datafrzqwytgggidr\",\"tcvdzytsd\":\"datacpqkpgbssjqj\"}}") - .toObject(SftpWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SftpWriteSettings model = new SftpWriteSettings().withMaxConcurrentConnections("datanaehllwqmraihe") - .withDisableMetricsCollection("datajvzlgclia") - .withCopyBehavior("datawxvihyi") - .withMetadata(Arrays.asList(new MetadataItem().withName("dataxex").withValue("datano"), - new MetadataItem().withName("dataymgkirfzvtzrq").withValue("datalipmuufsek"), - new MetadataItem().withName("datapufhwpkpejfszw").withValue("datamswb"), - new 
MetadataItem().withName("datamfwjcn").withValue("dataajptjhwrn"))) - .withOperationTimeout("dataypkiqlo") - .withUseTempFileRename("datazbceimsco"); - model = BinaryData.fromObject(model).toObject(SftpWriteSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java deleted file mode 100644 index 8050973a391c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SharePointOnlineListDatasetTypeProperties; - -public final class SharePointOnlineListDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SharePointOnlineListDatasetTypeProperties model = BinaryData.fromString("{\"listName\":\"datajomeq\"}") - .toObject(SharePointOnlineListDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SharePointOnlineListDatasetTypeProperties model - = new SharePointOnlineListDatasetTypeProperties().withListName("datajomeq"); - model = BinaryData.fromObject(model).toObject(SharePointOnlineListDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java deleted file mode 100644 index d2f382336a89..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListResourceDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SharePointOnlineListResourceDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SharePointOnlineListResourceDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SharePointOnlineListResourceDataset model = BinaryData.fromString( - 
"{\"type\":\"SharePointOnlineListResource\",\"typeProperties\":{\"listName\":\"datafkryxs\"},\"description\":\"aknk\",\"structure\":\"datah\",\"schema\":\"datap\",\"linkedServiceName\":{\"referenceName\":\"vihbmwrv\",\"parameters\":{\"vxznirnygtixkg\":\"datarohulobkabhvxjua\",\"qdi\":\"dataobmkphvdlorxz\"}},\"parameters\":{\"e\":{\"type\":\"String\",\"defaultValue\":\"datatfcieil\"}},\"annotations\":[\"datakehldopjsxvbbwsg\",\"datakkmibnmdp\",\"datad\",\"datapwtgzwmzhcmrloqa\"],\"folder\":{\"name\":\"yzavky\"},\"\":{\"bngzldvvd\":\"dataudnmbj\",\"pmq\":\"dataoptythctoxo\",\"sfzsgzgus\":\"dataerwhemvids\"}}") - .toObject(SharePointOnlineListResourceDataset.class); - Assertions.assertEquals("aknk", model.description()); - Assertions.assertEquals("vihbmwrv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("e").type()); - Assertions.assertEquals("yzavky", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SharePointOnlineListResourceDataset model = new SharePointOnlineListResourceDataset().withDescription("aknk") - .withStructure("datah") - .withSchema("datap") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vihbmwrv") - .withParameters(mapOf("vxznirnygtixkg", "datarohulobkabhvxjua", "qdi", "dataobmkphvdlorxz"))) - .withParameters( - mapOf("e", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datatfcieil"))) - .withAnnotations(Arrays.asList("datakehldopjsxvbbwsg", "datakkmibnmdp", "datad", "datapwtgzwmzhcmrloqa")) - .withFolder(new DatasetFolder().withName("yzavky")) - .withListName("datafkryxs"); - model = BinaryData.fromObject(model).toObject(SharePointOnlineListResourceDataset.class); - Assertions.assertEquals("aknk", model.description()); - Assertions.assertEquals("vihbmwrv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, 
model.parameters().get("e").type()); - Assertions.assertEquals("yzavky", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java deleted file mode 100644 index 236a744a4104..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SharePointOnlineListSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SharePointOnlineListSource; - -public final class SharePointOnlineListSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SharePointOnlineListSource model = BinaryData.fromString( - "{\"type\":\"SharePointOnlineListSource\",\"query\":\"databen\",\"httpRequestTimeout\":\"datay\",\"sourceRetryCount\":\"datavslpythqgziplac\",\"sourceRetryWait\":\"datavfdhsmqy\",\"maxConcurrentConnections\":\"dataefsnlob\",\"disableMetricsCollection\":\"datarj\",\"\":{\"ebjykaf\":\"datajgokvlix\",\"smcncjtovhcel\":\"dataizgkv\"}}") - .toObject(SharePointOnlineListSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SharePointOnlineListSource model = new SharePointOnlineListSource().withSourceRetryCount("datavslpythqgziplac") - .withSourceRetryWait("datavfdhsmqy") - .withMaxConcurrentConnections("dataefsnlob") - .withDisableMetricsCollection("datarj") - .withQuery("databen") - .withHttpRequestTimeout("datay"); - model = BinaryData.fromObject(model).toObject(SharePointOnlineListSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java deleted file mode 100644 index 5e5432768e38..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifyObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.ShopifyObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ShopifyObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ShopifyObjectDataset model = BinaryData.fromString( - "{\"type\":\"ShopifyObject\",\"typeProperties\":{\"tableName\":\"dataj\"},\"description\":\"wrduxntpfxxgja\",\"structure\":\"dataxfwf\",\"schema\":\"dataqv\",\"linkedServiceName\":{\"referenceName\":\"yfbkqynlzx\",\"parameters\":{\"kiehdmv\":\"datalupj\",\"rx\":\"datao\",\"cuans\":\"dataxffg\"}},\"parameters\":{\"fusekijhminenkb\":{\"type\":\"Array\",\"defaultValue\":\"datagcgcsapvbcq\"},\"bfvvcwvu\":{\"type\":\"SecureString\",\"defaultValue\":\"datayrkvorlfqmljewy\"}},\"annotations\":[\"dataju\",\"dataavvlnpbsotm\",\"datanklnmrz\"],\"folder\":{\"name\":\"vrkkfcwxizkstxne\"},\"\":{\"tc\":\"dataipx\",\"yyaeiivj\":\"datavriuvnfazxtvs\",\"wojoqf\":\"datakqtjwrv\"}}") - .toObject(ShopifyObjectDataset.class); - Assertions.assertEquals("wrduxntpfxxgja", model.description()); - Assertions.assertEquals("yfbkqynlzx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("fusekijhminenkb").type()); - Assertions.assertEquals("vrkkfcwxizkstxne", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ShopifyObjectDataset model = new ShopifyObjectDataset().withDescription("wrduxntpfxxgja") - 
.withStructure("dataxfwf") - .withSchema("dataqv") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("yfbkqynlzx") - .withParameters(mapOf("kiehdmv", "datalupj", "rx", "datao", "cuans", "dataxffg"))) - .withParameters(mapOf("fusekijhminenkb", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datagcgcsapvbcq"), - "bfvvcwvu", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("datayrkvorlfqmljewy"))) - .withAnnotations(Arrays.asList("dataju", "dataavvlnpbsotm", "datanklnmrz")) - .withFolder(new DatasetFolder().withName("vrkkfcwxizkstxne")) - .withTableName("dataj"); - model = BinaryData.fromObject(model).toObject(ShopifyObjectDataset.class); - Assertions.assertEquals("wrduxntpfxxgja", model.description()); - Assertions.assertEquals("yfbkqynlzx", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.ARRAY, model.parameters().get("fusekijhminenkb").type()); - Assertions.assertEquals("vrkkfcwxizkstxne", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java deleted file mode 100644 index edb1ea704d06..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ShopifySourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ShopifySource; - -public final class ShopifySourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ShopifySource model = BinaryData.fromString( - "{\"type\":\"ShopifySource\",\"query\":\"datajckgwtbfxxsfjnba\",\"queryTimeout\":\"datajighmkds\",\"additionalColumns\":\"datayyhtiyxehmn\",\"sourceRetryCount\":\"dataoy\",\"sourceRetryWait\":\"dataehkytl\",\"maxConcurrentConnections\":\"datamyznwrcfqwkqul\",\"disableMetricsCollection\":\"dataovqohwiw\",\"\":{\"sjjjcd\":\"dataxjxlssosndnypx\",\"xb\":\"datasvgdbfni\",\"jgczpdio\":\"datasjhpm\",\"cwmabehr\":\"datadtjylimzvjwjhmtc\"}}") - .toObject(ShopifySource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ShopifySource model = new ShopifySource().withSourceRetryCount("dataoy") - .withSourceRetryWait("dataehkytl") - .withMaxConcurrentConnections("datamyznwrcfqwkqul") - .withDisableMetricsCollection("dataovqohwiw") - .withQueryTimeout("datajighmkds") - .withAdditionalColumns("datayyhtiyxehmn") - .withQuery("datajckgwtbfxxsfjnba"); - model = BinaryData.fromObject(model).toObject(ShopifySource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java deleted file mode 100644 index 6ac0e9c4d567..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SkipErrorFileTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SkipErrorFile; - -public final class SkipErrorFileTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SkipErrorFile model - = BinaryData.fromString("{\"fileMissing\":\"datatetfdpu\",\"dataInconsistency\":\"datazdtjbesfum\"}") - .toObject(SkipErrorFile.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SkipErrorFile model - = new SkipErrorFile().withFileMissing("datatetfdpu").withDataInconsistency("datazdtjbesfum"); - model = BinaryData.fromObject(model).toObject(SkipErrorFile.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java deleted file mode 100644 index 093a7fee738d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SnowflakeDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SnowflakeDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeDataset model = BinaryData.fromString( - "{\"type\":\"SnowflakeTable\",\"typeProperties\":{\"schema\":\"dataeznl\",\"table\":\"datanfzxtfzqyugt\"},\"description\":\"n\",\"structure\":\"datadytnlrlcmwbejy\",\"schema\":\"datawvnhwwkrmqev\",\"linkedServiceName\":{\"referenceName\":\"hha\",\"parameters\":{\"iakgyj\":\"dataudfyziruqvgnj\",\"gikyluyu\":\"datazbm\",\"c\":\"datambrdcvoloxtv\"}},\"parameters\":{\"vokkyankxvcpt\":{\"type\":\"Object\",\"defaultValue\":\"datammglvnbenkp\"},\"rdxpcpautfzptr\":{\"type\":\"Int\",\"defaultValue\":\"databhnkxasomafegazh\"}},\"annotations\":[\"dataytrtffvpkdx\",\"datayuwenbq\"],\"folder\":{\"name\":\"awvoqatdjkal\"},\"\":{\"smxfzynfemqy\":\"datae\",\"wgssdquupirnb\":\"datakkp\",\"irzyudrq\":\"datalqyvdsqxkjwdzp\"}}") - .toObject(SnowflakeDataset.class); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals("hha", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("vokkyankxvcpt").type()); - Assertions.assertEquals("awvoqatdjkal", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeDataset model = new SnowflakeDataset().withDescription("n") - 
.withStructure("datadytnlrlcmwbejy") - .withSchema("datawvnhwwkrmqev") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hha") - .withParameters(mapOf("iakgyj", "dataudfyziruqvgnj", "gikyluyu", "datazbm", "c", "datambrdcvoloxtv"))) - .withParameters(mapOf("vokkyankxvcpt", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datammglvnbenkp"), - "rdxpcpautfzptr", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databhnkxasomafegazh"))) - .withAnnotations(Arrays.asList("dataytrtffvpkdx", "datayuwenbq")) - .withFolder(new DatasetFolder().withName("awvoqatdjkal")) - .withSchemaTypePropertiesSchema("dataeznl") - .withTable("datanfzxtfzqyugt"); - model = BinaryData.fromObject(model).toObject(SnowflakeDataset.class); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals("hha", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("vokkyankxvcpt").type()); - Assertions.assertEquals("awvoqatdjkal", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java deleted file mode 100644 index ace86702afcb..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SnowflakeDatasetTypeProperties; - -public final class SnowflakeDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"datarxhxmlfouqpskv\",\"table\":\"datadb\"}") - .toObject(SnowflakeDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeDatasetTypeProperties model - = new SnowflakeDatasetTypeProperties().withSchema("datarxhxmlfouqpskv").withTable("datadb"); - model = BinaryData.fromObject(model).toObject(SnowflakeDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java deleted file mode 100644 index b4e4a785902d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeExportCopyCommandTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeExportCopyCommand; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeExportCopyCommandTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeExportCopyCommand model = BinaryData.fromString( - "{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"sewjqg\":\"datayssz\",\"x\":\"dataloorhxduregljqp\",\"mhvwgc\":\"dataaakgdka\"},\"additionalFormatOptions\":{\"goomapcaxno\":\"databdkq\",\"jzt\":\"datanjfvjqvectoo\",\"vsrvkzv\":\"dataalsnm\"},\"storageIntegration\":\"dataeztmdyb\",\"\":{\"qogtnfla\":\"dataj\",\"q\":\"dataspghfv\",\"ocrr\":\"datajmyqosrsf\",\"dpyohnmru\":\"datarr\"}}") - .toObject(SnowflakeExportCopyCommand.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeExportCopyCommand model = new SnowflakeExportCopyCommand() - .withAdditionalCopyOptions(mapOf("sewjqg", "datayssz", "x", "dataloorhxduregljqp", "mhvwgc", "dataaakgdka")) - .withAdditionalFormatOptions( - mapOf("goomapcaxno", "databdkq", "jzt", "datanjfvjqvectoo", "vsrvkzv", "dataalsnm")) - .withStorageIntegration("dataeztmdyb"); - model = BinaryData.fromObject(model).toObject(SnowflakeExportCopyCommand.class); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java deleted file mode 100644 index af112e23fe03..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeImportCopyCommandTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeImportCopyCommand; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeImportCopyCommandTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeImportCopyCommand model = BinaryData.fromString( - "{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"gjh\":\"dataireetvjfizaf\",\"fwlyeiaj\":\"datauvndgrolgxajcsi\"},\"additionalFormatOptions\":{\"pwxx\":\"datamqteirrjjmvrzfpp\",\"gaxloafws\":\"datagzhnpxbuwauytqd\"},\"storageIntegration\":\"dataxqrokw\",\"\":{\"ql\":\"datapnd\"}}") - .toObject(SnowflakeImportCopyCommand.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeImportCopyCommand model = new SnowflakeImportCopyCommand() - .withAdditionalCopyOptions(mapOf("gjh", "dataireetvjfizaf", "fwlyeiaj", 
"datauvndgrolgxajcsi")) - .withAdditionalFormatOptions(mapOf("pwxx", "datamqteirrjjmvrzfpp", "gaxloafws", "datagzhnpxbuwauytqd")) - .withStorageIntegration("dataxqrokw"); - model = BinaryData.fromObject(model).toObject(SnowflakeImportCopyCommand.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java deleted file mode 100644 index 502f972975c0..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSinkTests.java +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeImportCopyCommand; -import com.azure.resourcemanager.datafactory.models.SnowflakeSink; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeSink model = BinaryData.fromString( - "{\"type\":\"SnowflakeSink\",\"preCopyScript\":\"datagqyszjuijzhvejqj\",\"importSettings\":{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"ykjpakaou\":\"datanyl\"},\"additionalFormatOptions\":{\"dpd\":\"datanfxaicheycakkona\",\"zyxaanhwuq\":\"datayha\",\"fwbekpeer\":\"datawcnhsksfbkxfkeeq\"},\"storageIntegration\":\"dataswzmrpdjrylfp\",\"\":{\"ehxban\":\"dataxbigeur\",\"ffgohrhjspsyh\":\"datasqfhatqssngevi\",\"ydtgpvnczfp\":\"dataapynpvgyafttbet\",\"kajkyrhucbfkaqlp\":\"dataybjku\"}},\"writeBatchSize\":\"dataptero\",\"writeBatchTimeout\":\"dataqaktao\",\"sinkRetryCount\":\"datagefobcqvzmyw\",\"sinkRetryWait\":\"datayns\",\"maxConcurrentConnections\":\"datamosqvojgolmkklz\",\"disableMetricsCollection\":\"dataau\",\"\":{\"jhoykgtyvrn\":\"datalntaoi\"}}") - .toObject(SnowflakeSink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeSink model = new SnowflakeSink().withWriteBatchSize("dataptero") - .withWriteBatchTimeout("dataqaktao") - .withSinkRetryCount("datagefobcqvzmyw") - .withSinkRetryWait("datayns") - .withMaxConcurrentConnections("datamosqvojgolmkklz") - .withDisableMetricsCollection("dataau") - .withPreCopyScript("datagqyszjuijzhvejqj") - .withImportSettings(new SnowflakeImportCopyCommand() - .withAdditionalCopyOptions(mapOf("ykjpakaou", "datanyl")) - .withAdditionalFormatOptions( - mapOf("dpd", "datanfxaicheycakkona", "zyxaanhwuq", "datayha", "fwbekpeer", "datawcnhsksfbkxfkeeq")) - 
.withStorageIntegration("dataswzmrpdjrylfp")); - model = BinaryData.fromObject(model).toObject(SnowflakeSink.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java deleted file mode 100644 index 091fa09a2578..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeSourceTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeExportCopyCommand; -import com.azure.resourcemanager.datafactory.models.SnowflakeSource; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeSource model = BinaryData.fromString( - "{\"type\":\"SnowflakeSource\",\"query\":\"dataumuuqwcka\",\"exportSettings\":{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"ipdqo\":\"datatdfzjwjefclihana\"},\"additionalFormatOptions\":{\"q\":\"datalqzopvhwmtdbfrj\",\"ps\":\"datauv\",\"scntdw\":\"datafeagordbs\"},\"storageIntegration\":\"datax\",\"\":{\"n\":\"datatowdwiffagfe\",\"ltthsuzxyl\":\"databpgc\",\"scobhhblj\":\"dataiflzsrk\",\"us\":\"datavpokvhobygffuzh\"}},\"sourceRetryCount\":\"dataff\",\"sourceRetryWait\":\"dataoovfwzysvnvrfjg\",\"maxConcurrentConnections\":\"dataup\",\"disableMetricsCollection\":\"datag\",\"\":{\"rowrmesziubkyvc\":\"datavwuje\",\"wdjbyaav\":\"datakoufwkaomytlx\",\"xyhuetztorhu\":\"datamsxamncuhxznma\"}}") - .toObject(SnowflakeSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeSource model = new SnowflakeSource().withSourceRetryCount("dataff") - .withSourceRetryWait("dataoovfwzysvnvrfjg") - .withMaxConcurrentConnections("dataup") - .withDisableMetricsCollection("datag") - .withQuery("dataumuuqwcka") - .withExportSettings( - new SnowflakeExportCopyCommand().withAdditionalCopyOptions(mapOf("ipdqo", "datatdfzjwjefclihana")) - .withAdditionalFormatOptions( - mapOf("q", "datalqzopvhwmtdbfrj", "ps", "datauv", "scntdw", "datafeagordbs")) - .withStorageIntegration("datax")); - model = BinaryData.fromObject(model).toObject(SnowflakeSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map 
mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java deleted file mode 100644 index 42657119694e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2DatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SnowflakeV2Dataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SnowflakeV2DatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeV2Dataset model = BinaryData.fromString( - 
"{\"type\":\"SnowflakeV2Table\",\"typeProperties\":{\"schema\":\"datarrhyjxcqcaczzvwa\",\"table\":\"datatt\"},\"description\":\"qyfydql\",\"structure\":\"dataslgyfybdsvkllrz\",\"schema\":\"datahhkbc\",\"linkedServiceName\":{\"referenceName\":\"cazk\",\"parameters\":{\"rmrfyyqjcni\":\"datathprgfwhfzhhr\",\"sddcuqddlda\":\"datazq\",\"ew\":\"datayvfzt\",\"sxjht\":\"datajoj\"}},\"parameters\":{\"trgu\":{\"type\":\"Int\",\"defaultValue\":\"dataocygoyineuaxpmez\"},\"xfoa\":{\"type\":\"Float\",\"defaultValue\":\"dataeo\"},\"xlmndhg\":{\"type\":\"SecureString\",\"defaultValue\":\"dataypz\"},\"vmitn\":{\"type\":\"SecureString\",\"defaultValue\":\"databpjuajzqx\"}},\"annotations\":[\"datahbujysvd\",\"datayy\",\"datadbhatmabtpgn\"],\"folder\":{\"name\":\"tn\"},\"\":{\"hciga\":\"dataew\",\"qxzxtert\":\"datahmdfspkdn\",\"envtol\":\"datazrrwsciclhdw\"}}") - .toObject(SnowflakeV2Dataset.class); - Assertions.assertEquals("qyfydql", model.description()); - Assertions.assertEquals("cazk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("trgu").type()); - Assertions.assertEquals("tn", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeV2Dataset model = new SnowflakeV2Dataset().withDescription("qyfydql") - .withStructure("dataslgyfybdsvkllrz") - .withSchema("datahhkbc") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("cazk") - .withParameters(mapOf("rmrfyyqjcni", "datathprgfwhfzhhr", "sddcuqddlda", "datazq", "ew", "datayvfzt", - "sxjht", "datajoj"))) - .withParameters(mapOf("trgu", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataocygoyineuaxpmez"), - "xfoa", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataeo"), - "xlmndhg", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataypz"), "vmitn", - new 
ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("databpjuajzqx"))) - .withAnnotations(Arrays.asList("datahbujysvd", "datayy", "datadbhatmabtpgn")) - .withFolder(new DatasetFolder().withName("tn")) - .withSchemaTypePropertiesSchema("datarrhyjxcqcaczzvwa") - .withTable("datatt"); - model = BinaryData.fromObject(model).toObject(SnowflakeV2Dataset.class); - Assertions.assertEquals("qyfydql", model.description()); - Assertions.assertEquals("cazk", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("trgu").type()); - Assertions.assertEquals("tn", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java deleted file mode 100644 index 424895035220..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SinkTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeImportCopyCommand; -import com.azure.resourcemanager.datafactory.models.SnowflakeV2Sink; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeV2SinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeV2Sink model = BinaryData.fromString( - "{\"type\":\"SnowflakeV2Sink\",\"preCopyScript\":\"datarhctbrvegdamoy\",\"importSettings\":{\"type\":\"SnowflakeImportCopyCommand\",\"additionalCopyOptions\":{\"izbyczmepcacgvln\":\"datapkezq\"},\"additionalFormatOptions\":{\"ktuvdes\":\"databyry\"},\"storageIntegration\":\"datarulnhbqtvyhs\",\"\":{\"xexupcuizvx\":\"datarff\"}},\"writeBatchSize\":\"datavzhlkeotdscqkxzr\",\"writeBatchTimeout\":\"dataoqzmvemli\",\"sinkRetryCount\":\"datadfqfnftrrhhgwaw\",\"sinkRetryWait\":\"datah\",\"maxConcurrentConnections\":\"datavcfxdvk\",\"disableMetricsCollection\":\"datafg\",\"\":{\"txhqqvdhdyyad\":\"datau\",\"dinfauytmqvsdyqy\":\"dataxnepub\",\"xtpbapojknvxantl\":\"datakmfotwmxedlcxm\",\"nqpkvvrhoqyv\":\"dataspiipfg\"}}") - .toObject(SnowflakeV2Sink.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeV2Sink model = new SnowflakeV2Sink().withWriteBatchSize("datavzhlkeotdscqkxzr") - .withWriteBatchTimeout("dataoqzmvemli") - .withSinkRetryCount("datadfqfnftrrhhgwaw") - .withSinkRetryWait("datah") - .withMaxConcurrentConnections("datavcfxdvk") - .withDisableMetricsCollection("datafg") - .withPreCopyScript("datarhctbrvegdamoy") - .withImportSettings( - new SnowflakeImportCopyCommand().withAdditionalCopyOptions(mapOf("izbyczmepcacgvln", "datapkezq")) - .withAdditionalFormatOptions(mapOf("ktuvdes", "databyry")) - .withStorageIntegration("datarulnhbqtvyhs")); - model = BinaryData.fromObject(model).toObject(SnowflakeV2Sink.class); - } - - // Use "Map.of" if 
available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java deleted file mode 100644 index a35a3f11631b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SnowflakeV2SourceTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SnowflakeExportCopyCommand; -import com.azure.resourcemanager.datafactory.models.SnowflakeV2Source; -import java.util.HashMap; -import java.util.Map; - -public final class SnowflakeV2SourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SnowflakeV2Source model = BinaryData.fromString( - 
"{\"type\":\"SnowflakeV2Source\",\"query\":\"dataai\",\"exportSettings\":{\"type\":\"SnowflakeExportCopyCommand\",\"additionalCopyOptions\":{\"ib\":\"datahlpf\",\"gzy\":\"datalmihvzdaycmen\",\"lvgqlexwqwbbell\":\"datal\"},\"additionalFormatOptions\":{\"ddfmflwfxdkpwdp\":\"dataotpc\",\"cugchtwxifudlrxb\":\"datayg\"},\"storageIntegration\":\"dataftpvgmqzitc\",\"\":{\"zvegawbmyv\":\"datalltasufqsfpze\",\"irvvvrbqxisavk\":\"datam\"}},\"sourceRetryCount\":\"dataxwz\",\"sourceRetryWait\":\"datahibridagwu\",\"maxConcurrentConnections\":\"datadymoqvcjkrynziu\",\"disableMetricsCollection\":\"datah\",\"\":{\"xjwzt\":\"datarygwagvui\",\"cv\":\"datafotllf\"}}") - .toObject(SnowflakeV2Source.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SnowflakeV2Source model = new SnowflakeV2Source().withSourceRetryCount("dataxwz") - .withSourceRetryWait("datahibridagwu") - .withMaxConcurrentConnections("datadymoqvcjkrynziu") - .withDisableMetricsCollection("datah") - .withQuery("dataai") - .withExportSettings(new SnowflakeExportCopyCommand() - .withAdditionalCopyOptions( - mapOf("ib", "datahlpf", "gzy", "datalmihvzdaycmen", "lvgqlexwqwbbell", "datal")) - .withAdditionalFormatOptions(mapOf("ddfmflwfxdkpwdp", "dataotpc", "cugchtwxifudlrxb", "datayg")) - .withStorageIntegration("dataftpvgmqzitc")); - model = BinaryData.fromObject(model).toObject(SnowflakeV2Source.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java deleted file mode 100644 index 6cc86433b36e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkConfigurationParametrizationReferenceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class SparkConfigurationParametrizationReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SparkConfigurationParametrizationReference model - = BinaryData.fromString("{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"dataly\"}") - .toObject(SparkConfigurationParametrizationReference.class); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SparkConfigurationParametrizationReference model = new SparkConfigurationParametrizationReference() - 
.withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) - .withReferenceName("dataly"); - model = BinaryData.fromObject(model).toObject(SparkConfigurationParametrizationReference.class); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java deleted file mode 100644 index 429edd25aef4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SparkDatasetTypeProperties; - -public final class SparkDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SparkDatasetTypeProperties model = BinaryData.fromString( - "{\"tableName\":\"datakscecmbaajdfwrd\",\"table\":\"datalvzkfekde\",\"schema\":\"datapjqtllbhjjpduibs\"}") - .toObject(SparkDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SparkDatasetTypeProperties model = new SparkDatasetTypeProperties().withTableName("datakscecmbaajdfwrd") - .withTable("datalvzkfekde") - .withSchema("datapjqtllbhjjpduibs"); - model = BinaryData.fromObject(model).toObject(SparkDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java deleted file mode 100644 index d2383c56f965..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkObjectDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SparkObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SparkObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SparkObjectDataset model = BinaryData.fromString( - "{\"type\":\"SparkObject\",\"typeProperties\":{\"tableName\":\"datatxtdqiusp\",\"table\":\"datazljvgjijzqjhljsa\",\"schema\":\"datajsisfqqhcmec\"},\"description\":\"sbfe\",\"structure\":\"datarpnjygllfkchhgsj\",\"schema\":\"datazcajlwmqc\",\"linkedServiceName\":{\"referenceName\":\"cabaam\",\"parameters\":{\"oqywsuarpzhryh\":\"datadhpmkxdujkxpuqzd\",\"zdsyxb\":\"datazx\",\"hwivkd\":\"datajilbuazcco\",\"pi\":\"datavjsknrbxz\"}},\"parameters\":{\"dqbvx\":{\"type\":\"Float\",\"defaultValue\":\"datai\"}},\"annotations\":[\"datalpwbopvhcbt\"],\"folder\":{\"name\":\"rjxcon\"},\"\":{\"h\":\"datakfki\",\"gvuqzgbjwvrudmp\":\"dataeoc\",\"esgyzwph\":\"dataewpmioleaja\"}}") - .toObject(SparkObjectDataset.class); - Assertions.assertEquals("sbfe", model.description()); - Assertions.assertEquals("cabaam", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("dqbvx").type()); - Assertions.assertEquals("rjxcon", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SparkObjectDataset model = new SparkObjectDataset().withDescription("sbfe") - .withStructure("datarpnjygllfkchhgsj") - .withSchema("datazcajlwmqc") - 
.withLinkedServiceName(new LinkedServiceReference().withReferenceName("cabaam") - .withParameters(mapOf("oqywsuarpzhryh", "datadhpmkxdujkxpuqzd", "zdsyxb", "datazx", "hwivkd", - "datajilbuazcco", "pi", "datavjsknrbxz"))) - .withParameters( - mapOf("dqbvx", new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datai"))) - .withAnnotations(Arrays.asList("datalpwbopvhcbt")) - .withFolder(new DatasetFolder().withName("rjxcon")) - .withTableName("datatxtdqiusp") - .withTable("datazljvgjijzqjhljsa") - .withSchemaTypePropertiesSchema("datajsisfqqhcmec"); - model = BinaryData.fromObject(model).toObject(SparkObjectDataset.class); - Assertions.assertEquals("sbfe", model.description()); - Assertions.assertEquals("cabaam", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.FLOAT, model.parameters().get("dqbvx").type()); - Assertions.assertEquals("rjxcon", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java deleted file mode 100644 index 18fa8ab15938..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SparkSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SparkSource; - -public final class SparkSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SparkSource model = BinaryData.fromString( - "{\"type\":\"SparkSource\",\"query\":\"dataskzw\",\"queryTimeout\":\"datah\",\"additionalColumns\":\"datahz\",\"sourceRetryCount\":\"datac\",\"sourceRetryWait\":\"datasoxoavlwwpv\",\"maxConcurrentConnections\":\"datanjwvc\",\"disableMetricsCollection\":\"datarqlceflgsndur\",\"\":{\"kuxedpqwzz\":\"datazjwmwkdehjlozzcw\",\"vucvebdfmdjnfe\":\"dataimgbxjgxrhajrub\"}}") - .toObject(SparkSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SparkSource model = new SparkSource().withSourceRetryCount("datac") - .withSourceRetryWait("datasoxoavlwwpv") - .withMaxConcurrentConnections("datanjwvc") - .withDisableMetricsCollection("datarqlceflgsndur") - .withQueryTimeout("datah") - .withAdditionalColumns("datahz") - .withQuery("dataskzw"); - model = BinaryData.fromObject(model).toObject(SparkSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java deleted file mode 100644 index 988e675d12e1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlDWSourceTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlDWSource; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; - -public final class SqlDWSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlDWSource model = BinaryData.fromString( - "{\"type\":\"SqlDWSource\",\"sqlReaderQuery\":\"datalug\",\"sqlReaderStoredProcedureName\":\"datau\",\"storedProcedureParameters\":\"dataypliotgtlan\",\"isolationLevel\":\"datakvlxsycqqdoxooxu\",\"partitionOption\":\"datafqoobwxctkveq\",\"partitionSettings\":{\"partitionColumnName\":\"datadwmhqcjrery\",\"partitionUpperBound\":\"databyqxeyzq\",\"partitionLowerBound\":\"datapsi\"},\"queryTimeout\":\"datalxvaovssibnv\",\"additionalColumns\":\"datavi\",\"sourceRetryCount\":\"datadbmzwlejiiyoon\",\"sourceRetryWait\":\"dataalr\",\"maxConcurrentConnections\":\"datazdbntopbabndw\",\"disableMetricsCollection\":\"datam\",\"\":{\"okhpst\":\"datamgdlgsxkyboysquy\",\"ukcojyx\":\"datacyigrhfevxyp\",\"otawyiq\":\"datahvoowrtcsu\"}}") - .toObject(SqlDWSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlDWSource model = new SqlDWSource().withSourceRetryCount("datadbmzwlejiiyoon") - .withSourceRetryWait("dataalr") - .withMaxConcurrentConnections("datazdbntopbabndw") - .withDisableMetricsCollection("datam") - .withQueryTimeout("datalxvaovssibnv") - .withAdditionalColumns("datavi") - .withSqlReaderQuery("datalug") - .withSqlReaderStoredProcedureName("datau") - .withStoredProcedureParameters("dataypliotgtlan") - .withIsolationLevel("datakvlxsycqqdoxooxu") - .withPartitionOption("datafqoobwxctkveq") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datadwmhqcjrery") - .withPartitionUpperBound("databyqxeyzq") - .withPartitionLowerBound("datapsi")); - model = 
BinaryData.fromObject(model).toObject(SqlDWSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java deleted file mode 100644 index 2ab240d05c1c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlMISourceTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlMISource; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; - -public final class SqlMISourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlMISource model = BinaryData.fromString( - "{\"type\":\"SqlMISource\",\"sqlReaderQuery\":\"datajrfk\",\"sqlReaderStoredProcedureName\":\"datac\",\"storedProcedureParameters\":\"dataelokxklgl\",\"isolationLevel\":\"dataregjo\",\"produceAdditionalTypes\":\"datan\",\"partitionOption\":\"datasbmvrmenrcqi\",\"partitionSettings\":{\"partitionColumnName\":\"datavp\",\"partitionUpperBound\":\"dataduiuvingm\",\"partitionLowerBound\":\"dataqy\"},\"queryTimeout\":\"datayuqdz\",\"additionalColumns\":\"dataojz\",\"sourceRetryCount\":\"dataykfjga\",\"sourceRetryWait\":\"datayscky\",\"maxConcurrentConnections\":\"datayj\",\"disableMetricsCollection\":\"datamfwrqzizggvmuotc\",\"\":{\"rlt\":\"databfyjampvwxlkh\",\"yw\":\"dataipmnqrbyq\",\"wgylolvxw\":\"databowcjkarggvyu\"}}") - .toObject(SqlMISource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
SqlMISource model = new SqlMISource().withSourceRetryCount("dataykfjga") - .withSourceRetryWait("datayscky") - .withMaxConcurrentConnections("datayj") - .withDisableMetricsCollection("datamfwrqzizggvmuotc") - .withQueryTimeout("datayuqdz") - .withAdditionalColumns("dataojz") - .withSqlReaderQuery("datajrfk") - .withSqlReaderStoredProcedureName("datac") - .withStoredProcedureParameters("dataelokxklgl") - .withIsolationLevel("dataregjo") - .withProduceAdditionalTypes("datan") - .withPartitionOption("datasbmvrmenrcqi") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datavp") - .withPartitionUpperBound("dataduiuvingm") - .withPartitionLowerBound("dataqy")); - model = BinaryData.fromObject(model).toObject(SqlMISource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java deleted file mode 100644 index 258931518562..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlPartitionSettingsTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; - -public final class SqlPartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"dataionmokyjmtdn\",\"partitionUpperBound\":\"databeeysk\",\"partitionLowerBound\":\"datalcaklesjgxdhgezy\"}") - .toObject(SqlPartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlPartitionSettings model = new SqlPartitionSettings().withPartitionColumnName("dataionmokyjmtdn") - .withPartitionUpperBound("databeeysk") - .withPartitionLowerBound("datalcaklesjgxdhgezy"); - model = BinaryData.fromObject(model).toObject(SqlPartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerBaseLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerBaseLinkedServiceTypePropertiesTests.java deleted file mode 100644 index 8543c19f41e3..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerBaseLinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlServerBaseLinkedServiceTypeProperties; - -public final class SqlServerBaseLinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerBaseLinkedServiceTypeProperties model = BinaryData.fromString( - "{\"server\":\"dataxoluzntbpcad\",\"database\":\"datax\",\"encrypt\":\"dataxipe\",\"trustServerCertificate\":\"dataplfmfvmjjfzi\",\"hostNameInCertificate\":\"datalbiqq\",\"applicationIntent\":\"dataarxknfvbsym\",\"connectTimeout\":\"databahdbtjm\",\"connectRetryCount\":\"datazonrklbizrxh\",\"connectRetryInterval\":\"datafvpanloqovvcxgq\",\"loadBalanceTimeout\":\"datauirgopgzatucu\",\"commandTimeout\":\"datajuzvyjxux\",\"integratedSecurity\":\"dataquoqhqrcsk\",\"failoverPartner\":\"dataqfhlrvuvd\",\"maxPoolSize\":\"datavyjcdpncvfyeqyod\",\"minPoolSize\":\"datajc\",\"multipleActiveResultSets\":\"datapqhipajs\",\"multiSubnetFailover\":\"datavnmevl\",\"packetSize\":\"datacuwrfgpjfv\",\"pooling\":\"datakseodvlmdzgvc\"}") - .toObject(SqlServerBaseLinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerBaseLinkedServiceTypeProperties model - = new SqlServerBaseLinkedServiceTypeProperties().withServer("dataxoluzntbpcad") - .withDatabase("datax") - .withEncrypt("dataxipe") - .withTrustServerCertificate("dataplfmfvmjjfzi") - .withHostnameInCertificate("datalbiqq") - .withApplicationIntent("dataarxknfvbsym") - .withConnectTimeout("databahdbtjm") - .withConnectRetryCount("datazonrklbizrxh") - .withConnectRetryInterval("datafvpanloqovvcxgq") - .withLoadBalanceTimeout("datauirgopgzatucu") - .withCommandTimeout("datajuzvyjxux") - .withIntegratedSecurity("dataquoqhqrcsk") - .withFailoverPartner("dataqfhlrvuvd") - .withMaxPoolSize("datavyjcdpncvfyeqyod") - .withMinPoolSize("datajc") - 
.withMultipleActiveResultSets("datapqhipajs") - .withMultiSubnetFailover("datavnmevl") - .withPacketSize("datacuwrfgpjfv") - .withPooling("datakseodvlmdzgvc"); - model = BinaryData.fromObject(model).toObject(SqlServerBaseLinkedServiceTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java deleted file mode 100644 index eef1eb87a3ed..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerSourceTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; -import com.azure.resourcemanager.datafactory.models.SqlServerSource; - -public final class SqlServerSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerSource model = BinaryData.fromString( - 
"{\"type\":\"SqlServerSource\",\"sqlReaderQuery\":\"datahaokhbqmxgglkq\",\"sqlReaderStoredProcedureName\":\"datapbynetyxuxopoc\",\"storedProcedureParameters\":\"dataffgggglz\",\"isolationLevel\":\"dataouhmngccnkgius\",\"produceAdditionalTypes\":\"dataswcpspaoxig\",\"partitionOption\":\"datai\",\"partitionSettings\":{\"partitionColumnName\":\"datapgpqsmglutn\",\"partitionUpperBound\":\"datanp\",\"partitionLowerBound\":\"dataxnbogxkid\"},\"queryTimeout\":\"dataxbgfwwcfwlwnj\",\"additionalColumns\":\"datanmop\",\"sourceRetryCount\":\"dataetdruugimviefbje\",\"sourceRetryWait\":\"dataiyjkhjuuep\",\"maxConcurrentConnections\":\"datauqwnajb\",\"disableMetricsCollection\":\"dataxqsvax\",\"\":{\"dxu\":\"dataisdwtug\",\"xllhkzunn\":\"dataipgcbwiwhtjo\",\"awxkdvevhyuuih\":\"datamwwx\",\"qmcvu\":\"datap\"}}") - .toObject(SqlServerSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerSource model = new SqlServerSource().withSourceRetryCount("dataetdruugimviefbje") - .withSourceRetryWait("dataiyjkhjuuep") - .withMaxConcurrentConnections("datauqwnajb") - .withDisableMetricsCollection("dataxqsvax") - .withQueryTimeout("dataxbgfwwcfwlwnj") - .withAdditionalColumns("datanmop") - .withSqlReaderQuery("datahaokhbqmxgglkq") - .withSqlReaderStoredProcedureName("datapbynetyxuxopoc") - .withStoredProcedureParameters("dataffgggglz") - .withIsolationLevel("dataouhmngccnkgius") - .withProduceAdditionalTypes("dataswcpspaoxig") - .withPartitionOption("datai") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datapgpqsmglutn") - .withPartitionUpperBound("datanp") - .withPartitionLowerBound("dataxnbogxkid")); - model = BinaryData.fromObject(model).toObject(SqlServerSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java deleted file mode 100644 index ebac6c594860..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTests.java +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.SqlServerStoredProcedureActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SqlServerStoredProcedureActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerStoredProcedureActivity model = BinaryData.fromString( - 
"{\"type\":\"SqlServerStoredProcedure\",\"typeProperties\":{\"storedProcedureName\":\"datalzk\",\"storedProcedureParameters\":\"datacgtbpbfbgfwjq\"},\"linkedServiceName\":{\"referenceName\":\"vse\",\"parameters\":{\"hrw\":\"dataxxdrgbnq\",\"flq\":\"datamexwg\",\"k\":\"dataugxudsmdglq\"}},\"policy\":{\"timeout\":\"datadjdasomxws\",\"retry\":\"datayl\",\"retryIntervalInSeconds\":45833654,\"secureInput\":false,\"secureOutput\":true,\"\":{\"yzzeqdjx\":\"dataikglmc\",\"zvtwfbqxoqnv\":\"datarbjxkarxvg\",\"pp\":\"datajhdcolnxw\",\"zvi\":\"dataodnntoloezptngr\"}},\"name\":\"xacxcac\",\"description\":\"dkomrpucytjxpdqw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"skyrhsijxm\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"zy\":\"dataxjsuwmbdtfetr\"}},{\"activity\":\"prrapghloemqapv\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\",\"Skipped\"],\"\":{\"wrbqadtvpgu\":\"datasnxue\",\"iv\":\"datatmtgk\",\"plphkiyiqpi\":\"dataxxe\"}}],\"userProperties\":[{\"name\":\"vzjkzslvxdp\",\"value\":\"datapubbwps\"},{\"name\":\"cxbkwm\",\"value\":\"datajqakacbc\"},{\"name\":\"rsnnvlm\",\"value\":\"datagfgtwqmtyfqutmj\"}],\"\":{\"qzdor\":\"datasjurilqc\",\"auwojgvpqzvtgwlz\":\"dataufogockbizqqajs\",\"br\":\"datacyvrbgi\"}}") - .toObject(SqlServerStoredProcedureActivity.class); - Assertions.assertEquals("xacxcac", model.name()); - Assertions.assertEquals("dkomrpucytjxpdqw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("skyrhsijxm", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vzjkzslvxdp", model.userProperties().get(0).name()); - Assertions.assertEquals("vse", model.linkedServiceName().referenceName()); - Assertions.assertEquals(45833654, 
model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerStoredProcedureActivity model - = new SqlServerStoredProcedureActivity().withName("xacxcac") - .withDescription("dkomrpucytjxpdqw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("skyrhsijxm") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("prrapghloemqapv") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("vzjkzslvxdp").withValue("datapubbwps"), - new UserProperty().withName("cxbkwm").withValue("datajqakacbc"), - new UserProperty().withName("rsnnvlm").withValue("datagfgtwqmtyfqutmj"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vse") - .withParameters(mapOf("hrw", "dataxxdrgbnq", "flq", "datamexwg", "k", "dataugxudsmdglq"))) - .withPolicy(new ActivityPolicy().withTimeout("datadjdasomxws") - .withRetry("datayl") - .withRetryIntervalInSeconds(45833654) - .withSecureInput(false) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withStoredProcedureName("datalzk") - .withStoredProcedureParameters("datacgtbpbfbgfwjq"); - model = BinaryData.fromObject(model).toObject(SqlServerStoredProcedureActivity.class); - Assertions.assertEquals("xacxcac", model.name()); - Assertions.assertEquals("dkomrpucytjxpdqw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - 
Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("skyrhsijxm", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vzjkzslvxdp", model.userProperties().get(0).name()); - Assertions.assertEquals("vse", model.linkedServiceName().referenceName()); - Assertions.assertEquals(45833654, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(false, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java deleted file mode 100644 index 4f24858fd02f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerStoredProcedureActivityTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SqlServerStoredProcedureActivityTypeProperties; - -public final class SqlServerStoredProcedureActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerStoredProcedureActivityTypeProperties model = BinaryData - .fromString("{\"storedProcedureName\":\"dataekjbljfk\",\"storedProcedureParameters\":\"databnnmzzcbc\"}") - .toObject(SqlServerStoredProcedureActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerStoredProcedureActivityTypeProperties model - = new SqlServerStoredProcedureActivityTypeProperties().withStoredProcedureName("dataekjbljfk") - .withStoredProcedureParameters("databnnmzzcbc"); - model = BinaryData.fromObject(model).toObject(SqlServerStoredProcedureActivityTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java deleted file mode 100644 index eccb245cfc44..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SqlServerTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SqlServerTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerTableDataset model = BinaryData.fromString( - "{\"type\":\"SqlServerTable\",\"typeProperties\":{\"tableName\":\"databd\",\"schema\":\"datarwkampyhw\",\"table\":\"dataldzniudrcycm\"},\"description\":\"uzymhlhihqknlv\",\"structure\":\"datan\",\"schema\":\"datakopaiildcp\",\"linkedServiceName\":{\"referenceName\":\"dkhqux\",\"parameters\":{\"gybuxmqxigid\":\"dataofpgvedrobujn\",\"pskgrhn\":\"datallnjg\"}},\"parameters\":{\"yqz\":{\"type\":\"String\",\"defaultValue\":\"datasazuqznghxh\"},\"vnrvzwpffx\":{\"type\":\"String\",\"defaultValue\":\"datak\"}},\"annotations\":[\"databntmvehohflyuv\"],\"folder\":{\"name\":\"zqzqweuydybnairv\"},\"\":{\"zeogeatr\":\"datasv\",\"rfcs\":\"datanqnvnc\",\"zsqbibaaugicovj\":\"datavjnkoiz\"}}") - .toObject(SqlServerTableDataset.class); - Assertions.assertEquals("uzymhlhihqknlv", model.description()); - Assertions.assertEquals("dkhqux", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("yqz").type()); - Assertions.assertEquals("zqzqweuydybnairv", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerTableDataset model = new SqlServerTableDataset().withDescription("uzymhlhihqknlv") - 
.withStructure("datan") - .withSchema("datakopaiildcp") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("dkhqux") - .withParameters(mapOf("gybuxmqxigid", "dataofpgvedrobujn", "pskgrhn", "datallnjg"))) - .withParameters(mapOf("yqz", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datasazuqznghxh"), - "vnrvzwpffx", new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datak"))) - .withAnnotations(Arrays.asList("databntmvehohflyuv")) - .withFolder(new DatasetFolder().withName("zqzqweuydybnairv")) - .withTableName("databd") - .withSchemaTypePropertiesSchema("datarwkampyhw") - .withTable("dataldzniudrcycm"); - model = BinaryData.fromObject(model).toObject(SqlServerTableDataset.class); - Assertions.assertEquals("uzymhlhihqknlv", model.description()); - Assertions.assertEquals("dkhqux", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("yqz").type()); - Assertions.assertEquals("zqzqweuydybnairv", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java deleted file mode 100644 index bd850f5924c1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlServerTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SqlServerTableDatasetTypeProperties; - -public final class SqlServerTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlServerTableDatasetTypeProperties model = BinaryData - .fromString("{\"tableName\":\"datavwrmjx\",\"schema\":\"datauod\",\"table\":\"dataczbassqfyylwpp\"}") - .toObject(SqlServerTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlServerTableDatasetTypeProperties model - = new SqlServerTableDatasetTypeProperties().withTableName("datavwrmjx") - .withSchema("datauod") - .withTable("dataczbassqfyylwpp"); - model = BinaryData.fromObject(model).toObject(SqlServerTableDatasetTypeProperties.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java deleted file mode 100644 index 71c390aea480..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SqlSourceTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; -import com.azure.resourcemanager.datafactory.models.SqlSource; - -public final class SqlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SqlSource model = BinaryData.fromString( - "{\"type\":\"SqlSource\",\"sqlReaderQuery\":\"datadqvqfldaqoaopz\",\"sqlReaderStoredProcedureName\":\"datafw\",\"storedProcedureParameters\":\"datad\",\"isolationLevel\":\"dataxformfeedfqjne\",\"partitionOption\":\"dataxtesnhxphxo\",\"partitionSettings\":{\"partitionColumnName\":\"datavtpqttusuxxbzmpv\",\"partitionUpperBound\":\"dataijrnnwgrxzcn\",\"partitionLowerBound\":\"datauezxluimkwbwmg\"},\"queryTimeout\":\"dataqlsn\",\"additionalColumns\":\"datachpcjztziu\",\"sourceRetryCount\":\"datayvpcfvi\",\"sourceRetryWait\":\"dataxciunet\",\"maxConcurrentConnections\":\"datagd\",\"disableMetricsCollection\":\"datakletlwa\",\"\":{\"ftjdr\":\"databphxx\"}}") - .toObject(SqlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SqlSource model = new SqlSource().withSourceRetryCount("datayvpcfvi") - .withSourceRetryWait("dataxciunet") - .withMaxConcurrentConnections("datagd") - 
.withDisableMetricsCollection("datakletlwa") - .withQueryTimeout("dataqlsn") - .withAdditionalColumns("datachpcjztziu") - .withSqlReaderQuery("datadqvqfldaqoaopz") - .withSqlReaderStoredProcedureName("datafw") - .withStoredProcedureParameters("datad") - .withIsolationLevel("dataxformfeedfqjne") - .withPartitionOption("dataxtesnhxphxo") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datavtpqttusuxxbzmpv") - .withPartitionUpperBound("dataijrnnwgrxzcn") - .withPartitionLowerBound("datauezxluimkwbwmg")); - model = BinaryData.fromObject(model).toObject(SqlSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java deleted file mode 100644 index 829974be16ed..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareObjectDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SquareObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SquareObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SquareObjectDataset model = BinaryData.fromString( - "{\"type\":\"SquareObject\",\"typeProperties\":{\"tableName\":\"dataqnneqrypyurvs\"},\"description\":\"ovtuer\",\"structure\":\"datazhbwcxyb\",\"schema\":\"datazycxhaoegjzg\",\"linkedServiceName\":{\"referenceName\":\"ljbnwczsrazcb\",\"parameters\":{\"uapasizzfmugykw\":\"datacqhxhj\",\"gthdzi\":\"dataycuotennd\",\"erwjqvswtwo\":\"datajzffp\"}},\"parameters\":{\"rrsguogk\":{\"type\":\"Object\",\"defaultValue\":\"datamzxvfybxmmrvnuvq\"},\"ftvvqtmvif\":{\"type\":\"Float\",\"defaultValue\":\"dataotpyabensjflw\"},\"xgosnxa\":{\"type\":\"Int\",\"defaultValue\":\"datasimalbmti\"},\"nlsbnucqxhpaqo\":{\"type\":\"Array\",\"defaultValue\":\"datacdfmzxaoxlhmvjc\"}},\"annotations\":[\"dataejoysoxovlznk\",\"dataeldkqdlqqhn\"],\"folder\":{\"name\":\"ykkxa\"},\"\":{\"pxiema\":\"datauptiicgvpzgyxcc\",\"tfmocn\":\"datamztjekxsnnbrysg\"}}") - .toObject(SquareObjectDataset.class); - Assertions.assertEquals("ovtuer", model.description()); - Assertions.assertEquals("ljbnwczsrazcb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("rrsguogk").type()); - Assertions.assertEquals("ykkxa", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void 
testSerialize() throws Exception { - SquareObjectDataset model = new SquareObjectDataset().withDescription("ovtuer") - .withStructure("datazhbwcxyb") - .withSchema("datazycxhaoegjzg") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ljbnwczsrazcb") - .withParameters( - mapOf("uapasizzfmugykw", "datacqhxhj", "gthdzi", "dataycuotennd", "erwjqvswtwo", "datajzffp"))) - .withParameters(mapOf("rrsguogk", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datamzxvfybxmmrvnuvq"), - "ftvvqtmvif", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("dataotpyabensjflw"), - "xgosnxa", new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("datasimalbmti"), - "nlsbnucqxhpaqo", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datacdfmzxaoxlhmvjc"))) - .withAnnotations(Arrays.asList("dataejoysoxovlznk", "dataeldkqdlqqhn")) - .withFolder(new DatasetFolder().withName("ykkxa")) - .withTableName("dataqnneqrypyurvs"); - model = BinaryData.fromObject(model).toObject(SquareObjectDataset.class); - Assertions.assertEquals("ovtuer", model.description()); - Assertions.assertEquals("ljbnwczsrazcb", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.OBJECT, model.parameters().get("rrsguogk").type()); - Assertions.assertEquals("ykkxa", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java deleted file mode 100644 index 9342483103ab..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SquareSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SquareSource; - -public final class SquareSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SquareSource model = BinaryData.fromString( - "{\"type\":\"SquareSource\",\"query\":\"datalp\",\"queryTimeout\":\"dataclkbwkmwdrvkb\",\"additionalColumns\":\"datavnnvk\",\"sourceRetryCount\":\"datazldzzjj\",\"sourceRetryWait\":\"datahjqengopdvnzn\",\"maxConcurrentConnections\":\"dataiodaj\",\"disableMetricsCollection\":\"dataszdyv\",\"\":{\"awh\":\"dataufbwr\",\"uee\":\"datazhs\"}}") - .toObject(SquareSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SquareSource model = new SquareSource().withSourceRetryCount("datazldzzjj") - .withSourceRetryWait("datahjqengopdvnzn") - .withMaxConcurrentConnections("dataiodaj") - .withDisableMetricsCollection("dataszdyv") - .withQueryTimeout("dataclkbwkmwdrvkb") - .withAdditionalColumns("datavnnvk") - .withQuery("datalp"); - model = 
BinaryData.fromObject(model).toObject(SquareSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java deleted file mode 100644 index 370f29f64f03..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisChildPackageTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisChildPackage; -import org.junit.jupiter.api.Assertions; - -public final class SsisChildPackageTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisChildPackage model = BinaryData.fromString( - "{\"packagePath\":\"datayjox\",\"packageName\":\"lcyflzuztdwxr\",\"packageContent\":\"dataambzprhpwwarz\",\"packageLastModifiedDate\":\"bbwtagxhriru\"}") - .toObject(SsisChildPackage.class); - Assertions.assertEquals("lcyflzuztdwxr", model.packageName()); - Assertions.assertEquals("bbwtagxhriru", model.packageLastModifiedDate()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisChildPackage model = new SsisChildPackage().withPackagePath("datayjox") - .withPackageName("lcyflzuztdwxr") - .withPackageContent("dataambzprhpwwarz") - .withPackageLastModifiedDate("bbwtagxhriru"); - model = BinaryData.fromObject(model).toObject(SsisChildPackage.class); - Assertions.assertEquals("lcyflzuztdwxr", model.packageName()); - Assertions.assertEquals("bbwtagxhriru", model.packageLastModifiedDate()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java deleted file mode 100644 index 66c323debae1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentReferenceTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisEnvironmentReference; -import org.junit.jupiter.api.Assertions; - -public final class SsisEnvironmentReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisEnvironmentReference model = BinaryData.fromString( - "{\"id\":1335480281745570557,\"environmentFolderName\":\"hvlxudheka\",\"environmentName\":\"irmid\",\"referenceType\":\"hjcgszfbqygkxrl\"}") - .toObject(SsisEnvironmentReference.class); - Assertions.assertEquals(1335480281745570557L, model.id()); - Assertions.assertEquals("hvlxudheka", model.environmentFolderName()); - Assertions.assertEquals("irmid", model.environmentName()); - Assertions.assertEquals("hjcgszfbqygkxrl", model.referenceType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisEnvironmentReference model = new SsisEnvironmentReference().withId(1335480281745570557L) - .withEnvironmentFolderName("hvlxudheka") - .withEnvironmentName("irmid") - .withReferenceType("hjcgszfbqygkxrl"); - model = BinaryData.fromObject(model).toObject(SsisEnvironmentReference.class); - Assertions.assertEquals(1335480281745570557L, 
model.id()); - Assertions.assertEquals("hvlxudheka", model.environmentFolderName()); - Assertions.assertEquals("irmid", model.environmentName()); - Assertions.assertEquals("hjcgszfbqygkxrl", model.referenceType()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java deleted file mode 100644 index 7051cdf2e3aa..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisEnvironmentTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisEnvironment; -import com.azure.resourcemanager.datafactory.models.SsisVariable; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisEnvironmentTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisEnvironment model = BinaryData.fromString( - "{\"type\":\"Environment\",\"folderId\":2091655788926974870,\"variables\":[{\"id\":1139935650998469771,\"name\":\"muxppwpcfmgrmt\",\"description\":\"hzbbdwrjenco\",\"dataType\":\"ii\",\"sensitive\":false,\"value\":\"dtpljonmaj\",\"sensitiveValue\":\"bxxcdkhxjwtkftg\"}],\"id\":611124156830775043,\"name\":\"p\",\"description\":\"wsjpgb\"}") - .toObject(SsisEnvironment.class); - Assertions.assertEquals(611124156830775043L, model.id()); - Assertions.assertEquals("p", model.name()); - Assertions.assertEquals("wsjpgb", model.description()); - Assertions.assertEquals(2091655788926974870L, model.folderId()); - 
Assertions.assertEquals(1139935650998469771L, model.variables().get(0).id()); - Assertions.assertEquals("muxppwpcfmgrmt", model.variables().get(0).name()); - Assertions.assertEquals("hzbbdwrjenco", model.variables().get(0).description()); - Assertions.assertEquals("ii", model.variables().get(0).dataType()); - Assertions.assertEquals(false, model.variables().get(0).sensitive()); - Assertions.assertEquals("dtpljonmaj", model.variables().get(0).value()); - Assertions.assertEquals("bxxcdkhxjwtkftg", model.variables().get(0).sensitiveValue()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisEnvironment model = new SsisEnvironment().withId(611124156830775043L) - .withName("p") - .withDescription("wsjpgb") - .withFolderId(2091655788926974870L) - .withVariables(Arrays.asList(new SsisVariable().withId(1139935650998469771L) - .withName("muxppwpcfmgrmt") - .withDescription("hzbbdwrjenco") - .withDataType("ii") - .withSensitive(false) - .withValue("dtpljonmaj") - .withSensitiveValue("bxxcdkhxjwtkftg"))); - model = BinaryData.fromObject(model).toObject(SsisEnvironment.class); - Assertions.assertEquals(611124156830775043L, model.id()); - Assertions.assertEquals("p", model.name()); - Assertions.assertEquals("wsjpgb", model.description()); - Assertions.assertEquals(2091655788926974870L, model.folderId()); - Assertions.assertEquals(1139935650998469771L, model.variables().get(0).id()); - Assertions.assertEquals("muxppwpcfmgrmt", model.variables().get(0).name()); - Assertions.assertEquals("hzbbdwrjenco", model.variables().get(0).description()); - Assertions.assertEquals("ii", model.variables().get(0).dataType()); - Assertions.assertEquals(false, model.variables().get(0).sensitive()); - Assertions.assertEquals("dtpljonmaj", model.variables().get(0).value()); - Assertions.assertEquals("bxxcdkhxjwtkftg", model.variables().get(0).sensitiveValue()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java deleted file mode 100644 index de02885d399f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisExecutionParameterTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisExecutionParameter; - -public final class SsisExecutionParameterTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisExecutionParameter model - = BinaryData.fromString("{\"value\":\"datasmyisndfrfhgow\"}").toObject(SsisExecutionParameter.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisExecutionParameter model = new SsisExecutionParameter().withValue("datasmyisndfrfhgow"); - model = BinaryData.fromObject(model).toObject(SsisExecutionParameter.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java deleted file mode 100644 index bafa0c1773da..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisFolderTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisFolder; -import org.junit.jupiter.api.Assertions; - -public final class SsisFolderTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisFolder model = BinaryData - .fromString( - "{\"type\":\"Folder\",\"id\":675766868795663943,\"name\":\"mkqbylbbnjldicq\",\"description\":\"a\"}") - .toObject(SsisFolder.class); - Assertions.assertEquals(675766868795663943L, model.id()); - Assertions.assertEquals("mkqbylbbnjldicq", model.name()); - Assertions.assertEquals("a", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisFolder model - = new SsisFolder().withId(675766868795663943L).withName("mkqbylbbnjldicq").withDescription("a"); - model = BinaryData.fromObject(model).toObject(SsisFolder.class); - Assertions.assertEquals(675766868795663943L, model.id()); - Assertions.assertEquals("mkqbylbbnjldicq", model.name()); - Assertions.assertEquals("a", model.description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java deleted file mode 100644 index bbeb7e51a343..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataListResponseInnerTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SsisObjectMetadataListResponseInner; -import com.azure.resourcemanager.datafactory.models.SsisObjectMetadata; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisObjectMetadataListResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisObjectMetadataListResponseInner model = BinaryData.fromString( - "{\"value\":[{\"type\":\"SsisObjectMetadata\",\"id\":1248291372184779781,\"name\":\"zvlvqhjkbegib\",\"description\":\"mxiebw\"},{\"type\":\"SsisObjectMetadata\",\"id\":9167362709610232735,\"name\":\"yqcgwrtzjuzgwy\",\"description\":\"htxongmtsavjc\"},{\"type\":\"SsisObjectMetadata\",\"id\":7752304449285326809,\"name\":\"p\",\"description\":\"knftguvriuh\"}],\"nextLink\":\"wmdyvxqtay\"}") - .toObject(SsisObjectMetadataListResponseInner.class); - Assertions.assertEquals(1248291372184779781L, model.value().get(0).id()); - Assertions.assertEquals("zvlvqhjkbegib", model.value().get(0).name()); - Assertions.assertEquals("mxiebw", model.value().get(0).description()); - Assertions.assertEquals("wmdyvxqtay", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisObjectMetadataListResponseInner model = new SsisObjectMetadataListResponseInner().withValue(Arrays.asList( - new SsisObjectMetadata().withId(1248291372184779781L).withName("zvlvqhjkbegib").withDescription("mxiebw"), - new SsisObjectMetadata().withId(9167362709610232735L) - .withName("yqcgwrtzjuzgwy") - .withDescription("htxongmtsavjc"), - new SsisObjectMetadata().withId(7752304449285326809L).withName("p").withDescription("knftguvriuh"))) - .withNextLink("wmdyvxqtay"); - model = BinaryData.fromObject(model).toObject(SsisObjectMetadataListResponseInner.class); - Assertions.assertEquals(1248291372184779781L, 
model.value().get(0).id()); - Assertions.assertEquals("zvlvqhjkbegib", model.value().get(0).name()); - Assertions.assertEquals("mxiebw", model.value().get(0).description()); - Assertions.assertEquals("wmdyvxqtay", model.nextLink()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java deleted file mode 100644 index 0b66b4b42fb8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataStatusResponseInnerTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SsisObjectMetadataStatusResponseInner; -import org.junit.jupiter.api.Assertions; - -public final class SsisObjectMetadataStatusResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisObjectMetadataStatusResponseInner model = BinaryData - .fromString( - "{\"status\":\"tdhxujznbmpowuwp\",\"name\":\"qlveualupjmkh\",\"properties\":\"obbc\",\"error\":\"s\"}") - .toObject(SsisObjectMetadataStatusResponseInner.class); - Assertions.assertEquals("tdhxujznbmpowuwp", model.status()); - Assertions.assertEquals("qlveualupjmkh", model.name()); - Assertions.assertEquals("obbc", model.properties()); - Assertions.assertEquals("s", model.error()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisObjectMetadataStatusResponseInner model - = new 
SsisObjectMetadataStatusResponseInner().withStatus("tdhxujznbmpowuwp") - .withName("qlveualupjmkh") - .withProperties("obbc") - .withError("s"); - model = BinaryData.fromObject(model).toObject(SsisObjectMetadataStatusResponseInner.class); - Assertions.assertEquals("tdhxujznbmpowuwp", model.status()); - Assertions.assertEquals("qlveualupjmkh", model.name()); - Assertions.assertEquals("obbc", model.properties()); - Assertions.assertEquals("s", model.error()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java deleted file mode 100644 index 35212259da5e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisObjectMetadataTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisObjectMetadata; -import org.junit.jupiter.api.Assertions; - -public final class SsisObjectMetadataTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisObjectMetadata model = BinaryData.fromString( - "{\"type\":\"SsisObjectMetadata\",\"id\":1536045625796396557,\"name\":\"oyq\",\"description\":\"xrmcqibycnojvk\"}") - .toObject(SsisObjectMetadata.class); - Assertions.assertEquals(1536045625796396557L, model.id()); - Assertions.assertEquals("oyq", model.name()); - Assertions.assertEquals("xrmcqibycnojvk", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisObjectMetadata model - = new SsisObjectMetadata().withId(1536045625796396557L).withName("oyq").withDescription("xrmcqibycnojvk"); - model = BinaryData.fromObject(model).toObject(SsisObjectMetadata.class); - Assertions.assertEquals(1536045625796396557L, model.id()); - Assertions.assertEquals("oyq", model.name()); - Assertions.assertEquals("xrmcqibycnojvk", model.description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java deleted file mode 100644 index 48ff97095f8d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPackageTests.java +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisPackage; -import com.azure.resourcemanager.datafactory.models.SsisParameter; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisPackageTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisPackage model = BinaryData.fromString( - "{\"type\":\"Package\",\"folderId\":3211289839298869603,\"projectVersion\":515454210949895163,\"projectId\":811846710701875594,\"parameters\":[{\"id\":7450103083846487889,\"name\":\"fjidqod\",\"description\":\"eogavfyihuz\",\"dataType\":\"pwnyfjcypazwiimd\",\"required\":false,\"sensitive\":false,\"designDefaultValue\":\"agrlwpamesitwqa\",\"defaultValue\":\"whuwxkyxi\",\"sensitiveDefaultValue\":\"f\",\"valueType\":\"rnai\",\"valueSet\":true,\"variable\":\"ccprkiyf\"},{\"id\":4424712053033601989,\"name\":\"mydx\",\"description\":\"uqbvfq\",\"dataType\":\"ihirqvvketydga\",\"required\":true,\"sensitive\":false,\"designDefaultValue\":\"gvqxe\",\"defaultValue\":\"rmh\",\"sensitiveDefaultValue\":\"aqgbb\",\"valueType\":\"vihylrxsiyzsyium\",\"valueSet\":true,\"variable\":\"q\"}],\"id\":7199022220193379982,\"name\":\"rn\",\"description\":\"qpkayqivbigdrqg\"}") - .toObject(SsisPackage.class); - Assertions.assertEquals(7199022220193379982L, model.id()); - Assertions.assertEquals("rn", model.name()); - Assertions.assertEquals("qpkayqivbigdrqg", model.description()); - Assertions.assertEquals(3211289839298869603L, model.folderId()); - Assertions.assertEquals(515454210949895163L, model.projectVersion()); - Assertions.assertEquals(811846710701875594L, model.projectId()); - Assertions.assertEquals(7450103083846487889L, model.parameters().get(0).id()); - Assertions.assertEquals("fjidqod", model.parameters().get(0).name()); - Assertions.assertEquals("eogavfyihuz", model.parameters().get(0).description()); - 
Assertions.assertEquals("pwnyfjcypazwiimd", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("agrlwpamesitwqa", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("whuwxkyxi", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("f", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("rnai", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("ccprkiyf", model.parameters().get(0).variable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisPackage model = new SsisPackage().withId(7199022220193379982L) - .withName("rn") - .withDescription("qpkayqivbigdrqg") - .withFolderId(3211289839298869603L) - .withProjectVersion(515454210949895163L) - .withProjectId(811846710701875594L) - .withParameters(Arrays.asList( - new SsisParameter().withId(7450103083846487889L) - .withName("fjidqod") - .withDescription("eogavfyihuz") - .withDataType("pwnyfjcypazwiimd") - .withRequired(false) - .withSensitive(false) - .withDesignDefaultValue("agrlwpamesitwqa") - .withDefaultValue("whuwxkyxi") - .withSensitiveDefaultValue("f") - .withValueType("rnai") - .withValueSet(true) - .withVariable("ccprkiyf"), - new SsisParameter().withId(4424712053033601989L) - .withName("mydx") - .withDescription("uqbvfq") - .withDataType("ihirqvvketydga") - .withRequired(true) - .withSensitive(false) - .withDesignDefaultValue("gvqxe") - .withDefaultValue("rmh") - .withSensitiveDefaultValue("aqgbb") - .withValueType("vihylrxsiyzsyium") - .withValueSet(true) - .withVariable("q"))); - model = BinaryData.fromObject(model).toObject(SsisPackage.class); - Assertions.assertEquals(7199022220193379982L, model.id()); - Assertions.assertEquals("rn", model.name()); - 
Assertions.assertEquals("qpkayqivbigdrqg", model.description()); - Assertions.assertEquals(3211289839298869603L, model.folderId()); - Assertions.assertEquals(515454210949895163L, model.projectVersion()); - Assertions.assertEquals(811846710701875594L, model.projectId()); - Assertions.assertEquals(7450103083846487889L, model.parameters().get(0).id()); - Assertions.assertEquals("fjidqod", model.parameters().get(0).name()); - Assertions.assertEquals("eogavfyihuz", model.parameters().get(0).description()); - Assertions.assertEquals("pwnyfjcypazwiimd", model.parameters().get(0).dataType()); - Assertions.assertEquals(false, model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("agrlwpamesitwqa", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("whuwxkyxi", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("f", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("rnai", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("ccprkiyf", model.parameters().get(0).variable()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java deleted file mode 100644 index 72f3421bae39..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisParameterTests.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisParameter; -import org.junit.jupiter.api.Assertions; - -public final class SsisParameterTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisParameter model = BinaryData.fromString( - "{\"id\":8191940074280365159,\"name\":\"lpumveybodhrvyk\",\"description\":\"umwbcu\",\"dataType\":\"vegpdxts\",\"required\":false,\"sensitive\":false,\"designDefaultValue\":\"ojybolqoxuptsli\",\"defaultValue\":\"mlkwk\",\"sensitiveDefaultValue\":\"invamtykxsz\",\"valueType\":\"kfxcsqmz\",\"valueSet\":true,\"variable\":\"tkdpczeohplrgcnb\"}") - .toObject(SsisParameter.class); - Assertions.assertEquals(8191940074280365159L, model.id()); - Assertions.assertEquals("lpumveybodhrvyk", model.name()); - Assertions.assertEquals("umwbcu", model.description()); - Assertions.assertEquals("vegpdxts", model.dataType()); - Assertions.assertEquals(false, model.required()); - Assertions.assertEquals(false, model.sensitive()); - Assertions.assertEquals("ojybolqoxuptsli", model.designDefaultValue()); - Assertions.assertEquals("mlkwk", model.defaultValue()); - Assertions.assertEquals("invamtykxsz", model.sensitiveDefaultValue()); - Assertions.assertEquals("kfxcsqmz", model.valueType()); - Assertions.assertEquals(true, model.valueSet()); - Assertions.assertEquals("tkdpczeohplrgcnb", model.variable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisParameter model = new SsisParameter().withId(8191940074280365159L) - .withName("lpumveybodhrvyk") - .withDescription("umwbcu") - .withDataType("vegpdxts") - .withRequired(false) - .withSensitive(false) - .withDesignDefaultValue("ojybolqoxuptsli") - .withDefaultValue("mlkwk") - .withSensitiveDefaultValue("invamtykxsz") - .withValueType("kfxcsqmz") - .withValueSet(true) - .withVariable("tkdpczeohplrgcnb"); - model = 
BinaryData.fromObject(model).toObject(SsisParameter.class); - Assertions.assertEquals(8191940074280365159L, model.id()); - Assertions.assertEquals("lpumveybodhrvyk", model.name()); - Assertions.assertEquals("umwbcu", model.description()); - Assertions.assertEquals("vegpdxts", model.dataType()); - Assertions.assertEquals(false, model.required()); - Assertions.assertEquals(false, model.sensitive()); - Assertions.assertEquals("ojybolqoxuptsli", model.designDefaultValue()); - Assertions.assertEquals("mlkwk", model.defaultValue()); - Assertions.assertEquals("invamtykxsz", model.sensitiveDefaultValue()); - Assertions.assertEquals("kfxcsqmz", model.valueType()); - Assertions.assertEquals(true, model.valueSet()); - Assertions.assertEquals("tkdpczeohplrgcnb", model.variable()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java deleted file mode 100644 index eee737971fbf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisProjectTests.java +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisEnvironmentReference; -import com.azure.resourcemanager.datafactory.models.SsisParameter; -import com.azure.resourcemanager.datafactory.models.SsisProject; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class SsisProjectTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisProject model = BinaryData.fromString( - "{\"type\":\"Project\",\"folderId\":2361606905945587750,\"version\":2410957260695908721,\"environmentRefs\":[{\"id\":6344400957325352853,\"environmentFolderName\":\"hfoyfzoidyaeprev\",\"environmentName\":\"ukfalwceechca\",\"referenceType\":\"qbeqpucnu\"},{\"id\":1537545616106989572,\"environmentFolderName\":\"hicrjriy\",\"environmentName\":\"bydrlqllb\",\"referenceType\":\"sn\"},{\"id\":4343829442838622501,\"environmentFolderName\":\"rhxgik\",\"environmentName\":\"lugse\",\"referenceType\":\"bro\"},{\"id\":5130213256158503543,\"environmentFolderName\":\"mzkuxdgpksgotb\",\"environmentName\":\"vnjql\",\"referenceType\":\"qqvcugusqlxlxedt\"}],\"parameters\":[{\"id\":2404413554408860527,\"name\":\"acbyfisbln\",\"description\":\"puyypaggpaiheaey\",\"dataType\":\"loqrmgd\",\"required\":true,\"sensitive\":false,\"designDefaultValue\":\"lxtywukhj\",\"defaultValue\":\"pllitx\",\"sensitiveDefaultValue\":\"gkw\",\"valueType\":\"oyhqoivxcodwkwo\",\"valueSet\":true,\"variable\":\"chh\"},{\"id\":439792263271871995,\"name\":\"ewlfwfiikqcdnzs\",\"description\":\"uhgneoodmcrxl\",\"dataType\":\"oa\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"kr\",\"defaultValue\":\"jwnbrmdwtbrnl\",\"sensitiveDefaultValue\":\"iaani\",\"valueType\":\"cjhatclpphc\",\"valueSet\":false,\"variable\":\"ycpanapfaoiz\"},{\"id\":3615186402958789259,\"name\":\"xngzzxqbgqnzmzc\",\"description\":\"xzjkpifpucvbd\",\"dataType\":\"w\",\"required\":false,\
"sensitive\":false,\"designDefaultValue\":\"fz\",\"defaultValue\":\"mbinpxmiwt\",\"sensitiveDefaultValue\":\"ifpvrdukcdnzo\",\"valueType\":\"abux\",\"valueSet\":false,\"variable\":\"awshramqsugqcglm\"},{\"id\":3863814695539761286,\"name\":\"ofxvqlauuagwa\",\"description\":\"mcer\",\"dataType\":\"feiqb\",\"required\":false,\"sensitive\":true,\"designDefaultValue\":\"jipssvnonijcq\",\"defaultValue\":\"ozzjkugpd\",\"sensitiveDefaultValue\":\"bto\",\"valueType\":\"ocuzxllbpwa\",\"valueSet\":true,\"variable\":\"tt\"}],\"id\":7823582354412258300,\"name\":\"wxyvtkzbhiz\",\"description\":\"usddmwnfhmjusu\"}") - .toObject(SsisProject.class); - Assertions.assertEquals(7823582354412258300L, model.id()); - Assertions.assertEquals("wxyvtkzbhiz", model.name()); - Assertions.assertEquals("usddmwnfhmjusu", model.description()); - Assertions.assertEquals(2361606905945587750L, model.folderId()); - Assertions.assertEquals(2410957260695908721L, model.version()); - Assertions.assertEquals(6344400957325352853L, model.environmentRefs().get(0).id()); - Assertions.assertEquals("hfoyfzoidyaeprev", model.environmentRefs().get(0).environmentFolderName()); - Assertions.assertEquals("ukfalwceechca", model.environmentRefs().get(0).environmentName()); - Assertions.assertEquals("qbeqpucnu", model.environmentRefs().get(0).referenceType()); - Assertions.assertEquals(2404413554408860527L, model.parameters().get(0).id()); - Assertions.assertEquals("acbyfisbln", model.parameters().get(0).name()); - Assertions.assertEquals("puyypaggpaiheaey", model.parameters().get(0).description()); - Assertions.assertEquals("loqrmgd", model.parameters().get(0).dataType()); - Assertions.assertEquals(true, model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("lxtywukhj", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("pllitx", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("gkw", 
model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("oyhqoivxcodwkwo", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("chh", model.parameters().get(0).variable()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisProject model = new SsisProject().withId(7823582354412258300L) - .withName("wxyvtkzbhiz") - .withDescription("usddmwnfhmjusu") - .withFolderId(2361606905945587750L) - .withVersion(2410957260695908721L) - .withEnvironmentRefs(Arrays.asList( - new SsisEnvironmentReference().withId(6344400957325352853L) - .withEnvironmentFolderName("hfoyfzoidyaeprev") - .withEnvironmentName("ukfalwceechca") - .withReferenceType("qbeqpucnu"), - new SsisEnvironmentReference().withId(1537545616106989572L) - .withEnvironmentFolderName("hicrjriy") - .withEnvironmentName("bydrlqllb") - .withReferenceType("sn"), - new SsisEnvironmentReference().withId(4343829442838622501L) - .withEnvironmentFolderName("rhxgik") - .withEnvironmentName("lugse") - .withReferenceType("bro"), - new SsisEnvironmentReference().withId(5130213256158503543L) - .withEnvironmentFolderName("mzkuxdgpksgotb") - .withEnvironmentName("vnjql") - .withReferenceType("qqvcugusqlxlxedt"))) - .withParameters(Arrays.asList( - new SsisParameter().withId(2404413554408860527L) - .withName("acbyfisbln") - .withDescription("puyypaggpaiheaey") - .withDataType("loqrmgd") - .withRequired(true) - .withSensitive(false) - .withDesignDefaultValue("lxtywukhj") - .withDefaultValue("pllitx") - .withSensitiveDefaultValue("gkw") - .withValueType("oyhqoivxcodwkwo") - .withValueSet(true) - .withVariable("chh"), - new SsisParameter().withId(439792263271871995L) - .withName("ewlfwfiikqcdnzs") - .withDescription("uhgneoodmcrxl") - .withDataType("oa") - .withRequired(false) - .withSensitive(true) - .withDesignDefaultValue("kr") - .withDefaultValue("jwnbrmdwtbrnl") - 
.withSensitiveDefaultValue("iaani") - .withValueType("cjhatclpphc") - .withValueSet(false) - .withVariable("ycpanapfaoiz"), - new SsisParameter().withId(3615186402958789259L) - .withName("xngzzxqbgqnzmzc") - .withDescription("xzjkpifpucvbd") - .withDataType("w") - .withRequired(false) - .withSensitive(false) - .withDesignDefaultValue("fz") - .withDefaultValue("mbinpxmiwt") - .withSensitiveDefaultValue("ifpvrdukcdnzo") - .withValueType("abux") - .withValueSet(false) - .withVariable("awshramqsugqcglm"), - new SsisParameter().withId(3863814695539761286L) - .withName("ofxvqlauuagwa") - .withDescription("mcer") - .withDataType("feiqb") - .withRequired(false) - .withSensitive(true) - .withDesignDefaultValue("jipssvnonijcq") - .withDefaultValue("ozzjkugpd") - .withSensitiveDefaultValue("bto") - .withValueType("ocuzxllbpwa") - .withValueSet(true) - .withVariable("tt"))); - model = BinaryData.fromObject(model).toObject(SsisProject.class); - Assertions.assertEquals(7823582354412258300L, model.id()); - Assertions.assertEquals("wxyvtkzbhiz", model.name()); - Assertions.assertEquals("usddmwnfhmjusu", model.description()); - Assertions.assertEquals(2361606905945587750L, model.folderId()); - Assertions.assertEquals(2410957260695908721L, model.version()); - Assertions.assertEquals(6344400957325352853L, model.environmentRefs().get(0).id()); - Assertions.assertEquals("hfoyfzoidyaeprev", model.environmentRefs().get(0).environmentFolderName()); - Assertions.assertEquals("ukfalwceechca", model.environmentRefs().get(0).environmentName()); - Assertions.assertEquals("qbeqpucnu", model.environmentRefs().get(0).referenceType()); - Assertions.assertEquals(2404413554408860527L, model.parameters().get(0).id()); - Assertions.assertEquals("acbyfisbln", model.parameters().get(0).name()); - Assertions.assertEquals("puyypaggpaiheaey", model.parameters().get(0).description()); - Assertions.assertEquals("loqrmgd", model.parameters().get(0).dataType()); - Assertions.assertEquals(true, 
model.parameters().get(0).required()); - Assertions.assertEquals(false, model.parameters().get(0).sensitive()); - Assertions.assertEquals("lxtywukhj", model.parameters().get(0).designDefaultValue()); - Assertions.assertEquals("pllitx", model.parameters().get(0).defaultValue()); - Assertions.assertEquals("gkw", model.parameters().get(0).sensitiveDefaultValue()); - Assertions.assertEquals("oyhqoivxcodwkwo", model.parameters().get(0).valueType()); - Assertions.assertEquals(true, model.parameters().get(0).valueSet()); - Assertions.assertEquals("chh", model.parameters().get(0).variable()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java deleted file mode 100644 index 181c10037228..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisPropertyOverrideTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisPropertyOverride; -import org.junit.jupiter.api.Assertions; - -public final class SsisPropertyOverrideTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisPropertyOverride model = BinaryData.fromString("{\"value\":\"datanvcqhmu\",\"isSensitive\":false}") - .toObject(SsisPropertyOverride.class); - Assertions.assertEquals(false, model.isSensitive()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisPropertyOverride model = new SsisPropertyOverride().withValue("datanvcqhmu").withIsSensitive(false); - model = BinaryData.fromObject(model).toObject(SsisPropertyOverride.class); - Assertions.assertEquals(false, model.isSensitive()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java deleted file mode 100644 index a70664025720..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SsisVariableTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SsisVariable; -import org.junit.jupiter.api.Assertions; - -public final class SsisVariableTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SsisVariable model = BinaryData.fromString( - "{\"id\":3948545961774018407,\"name\":\"hgvtepvrunudmakk\",\"description\":\"rnaxkczkwohdigey\",\"dataType\":\"cftsamod\",\"sensitive\":false,\"value\":\"ktvxerow\",\"sensitiveValue\":\"vrnnbegrafeo\"}") - .toObject(SsisVariable.class); - Assertions.assertEquals(3948545961774018407L, model.id()); - Assertions.assertEquals("hgvtepvrunudmakk", model.name()); - Assertions.assertEquals("rnaxkczkwohdigey", model.description()); - Assertions.assertEquals("cftsamod", model.dataType()); - Assertions.assertEquals(false, model.sensitive()); - Assertions.assertEquals("ktvxerow", model.value()); - Assertions.assertEquals("vrnnbegrafeo", model.sensitiveValue()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SsisVariable model = new SsisVariable().withId(3948545961774018407L) - .withName("hgvtepvrunudmakk") - .withDescription("rnaxkczkwohdigey") - .withDataType("cftsamod") - .withSensitive(false) - .withValue("ktvxerow") - .withSensitiveValue("vrnnbegrafeo"); - model = BinaryData.fromObject(model).toObject(SsisVariable.class); - Assertions.assertEquals(3948545961774018407L, model.id()); - Assertions.assertEquals("hgvtepvrunudmakk", model.name()); - Assertions.assertEquals("rnaxkczkwohdigey", model.description()); - Assertions.assertEquals("cftsamod", model.dataType()); - Assertions.assertEquals(false, model.sensitive()); - Assertions.assertEquals("ktvxerow", model.value()); - Assertions.assertEquals("vrnnbegrafeo", model.sensitiveValue()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java deleted file mode 100644 index b776edfe3392..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StagingSettingsTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.StagingSettings; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class StagingSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - StagingSettings model = BinaryData.fromString( - "{\"linkedServiceName\":{\"referenceName\":\"oepssoqdibygrqci\",\"parameters\":{\"ypftrdicst\":\"datakxikxiqxlxoks\",\"opgwpkat\":\"databqgatkl\",\"sdpyirtrlzkpje\":\"datacetyyvxkwobbw\",\"klowuth\":\"datagzhhfnaqclep\"}},\"path\":\"dataphnmllbljeh\",\"enableCompression\":\"dataxxuofneaqah\",\"\":{\"xaovubfllfke\":\"datanapxhtqwsd\",\"cbuzudkqoeoukvi\":\"databziibuabpvdwhvn\"}}") - .toObject(StagingSettings.class); - Assertions.assertEquals("oepssoqdibygrqci", model.linkedServiceName().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - StagingSettings model = new StagingSettings() - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("oepssoqdibygrqci") - .withParameters(mapOf("ypftrdicst", "datakxikxiqxlxoks", "opgwpkat", 
"databqgatkl", "sdpyirtrlzkpje", - "datacetyyvxkwobbw", "klowuth", "datagzhhfnaqclep"))) - .withPath("dataphnmllbljeh") - .withEnableCompression("dataxxuofneaqah") - .withAdditionalProperties(mapOf()); - model = BinaryData.fromObject(model).toObject(StagingSettings.class); - Assertions.assertEquals("oepssoqdibygrqci", model.linkedServiceName().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java deleted file mode 100644 index 8a2cf9e664da..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreReadSettingsTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class StoreReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - StoreReadSettings model = BinaryData.fromString( - "{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"dataj\",\"disableMetricsCollection\":\"datalzq\",\"\":{\"letb\":\"datar\",\"gcvqewx\":\"datalbhkij\",\"kzvjyvobevfbmxz\":\"dataqiwxeppuhkizo\",\"ilhdbbxm\":\"databpghsrlkpajiobyb\"}}") - .toObject(StoreReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - StoreReadSettings model = new StoreReadSettings().withMaxConcurrentConnections("dataj") - .withDisableMetricsCollection("datalzq") - .withAdditionalProperties(mapOf("type", "StoreReadSettings")); - model = BinaryData.fromObject(model).toObject(StoreReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java deleted file mode 100644 index 73cd70226457..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/StoreWriteSettingsTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.MetadataItem; -import com.azure.resourcemanager.datafactory.models.StoreWriteSettings; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public final class StoreWriteSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - StoreWriteSettings model = BinaryData.fromString( - "{\"type\":\"StoreWriteSettings\",\"maxConcurrentConnections\":\"databkorpu\",\"disableMetricsCollection\":\"datarppnjzbuvmfsfr\",\"copyBehavior\":\"datanqfnz\",\"metadata\":[{\"name\":\"datanularnupprdjfeps\",\"value\":\"datauzxoyanlhjeue\"},{\"name\":\"dataauxkvruryyqytaqj\",\"value\":\"datakhijghp\"},{\"name\":\"dataqpxkikd\",\"value\":\"databwyar\"},{\"name\":\"datahbjbl\",\"value\":\"datafqw\"}],\"\":{\"lnbxoucsddplgjfh\":\"dataxtpzdgyilwuiklb\",\"t\":\"dataiahokeacmadyo\",\"cctwbe\":\"datadauo\"}}") - .toObject(StoreWriteSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - StoreWriteSettings model = new StoreWriteSettings().withMaxConcurrentConnections("databkorpu") - .withDisableMetricsCollection("datarppnjzbuvmfsfr") - .withCopyBehavior("datanqfnz") - .withMetadata( - Arrays.asList(new MetadataItem().withName("datanularnupprdjfeps").withValue("datauzxoyanlhjeue"), - new MetadataItem().withName("dataauxkvruryyqytaqj").withValue("datakhijghp"), - new MetadataItem().withName("dataqpxkikd").withValue("databwyar"), - new MetadataItem().withName("datahbjbl").withValue("datafqw"))) - .withAdditionalProperties(mapOf("type", "StoreWriteSettings")); - model = BinaryData.fromObject(model).toObject(StoreWriteSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java deleted file mode 100644 index d0c129e11dcf..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SubResourceDebugResourceTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SubResourceDebugResource; -import org.junit.jupiter.api.Assertions; - -public final class SubResourceDebugResourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SubResourceDebugResource model - = BinaryData.fromString("{\"name\":\"wvz\"}").toObject(SubResourceDebugResource.class); - Assertions.assertEquals("wvz", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SubResourceDebugResource model = new SubResourceDebugResource().withName("wvz"); - model = BinaryData.fromObject(model).toObject(SubResourceDebugResource.class); - Assertions.assertEquals("wvz", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java deleted file mode 100644 index 3091cc1ce295..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTests.java +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.SwitchActivity; -import com.azure.resourcemanager.datafactory.models.SwitchCase; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SwitchActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SwitchActivity model = BinaryData.fromString( - 
"{\"type\":\"Switch\",\"typeProperties\":{\"on\":{\"value\":\"ctpzhoxagayno\"},\"cases\":[{\"value\":\"tefevhedfzxsqyp\",\"activities\":[{\"type\":\"Activity\",\"name\":\"lrgsfnjokrfpiqgq\",\"description\":\"rlbsglqiuqsqzumx\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"oziqcuiek\",\"dependencyConditions\":[]},{\"activity\":\"yaxpukxt\",\"dependencyConditions\":[]},{\"activity\":\"eejxwbredxmdpfx\",\"dependencyConditions\":[]},{\"activity\":\"kwyqo\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"ylqgenbe\",\"value\":\"datapaiat\"},{\"name\":\"oxvbwsa\",\"value\":\"datazvtinrortjtyls\"},{\"name\":\"udp\",\"value\":\"datahvnngijnzlo\"},{\"name\":\"xihf\",\"value\":\"datan\"}],\"\":{\"rfvbicd\":\"datajlrfwqnssxid\",\"qnqllmqeauizk\":\"datavypfobzxbfc\",\"e\":\"datajqfachfmvqnkgst\"}}]},{\"value\":\"jvs\",\"activities\":[{\"type\":\"Activity\",\"name\":\"wpqvgxpwmoefhbur\",\"description\":\"a\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"wpxvptqnqbd\",\"dependencyConditions\":[]},{\"activity\":\"w\",\"dependencyConditions\":[]},{\"activity\":\"otzlfhnf\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"wmcugveiie\",\"value\":\"dataoolblvcalbud\"},{\"name\":\"qthohfqbeaizv\",\"value\":\"datanhxgiydkrgdascm\"},{\"name\":\"nkabwpdvedmx\",\"value\":\"datakbgxgykx\"}],\"\":{\"zrsz\":\"datataonkfbgwfkczlde\",\"dcisceiauoy\":\"dataj\"}},{\"type\":\"Activity\",\"name\":\"udnxaw\",\"description\":\"mbmbvccuikpaviu\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"uio\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"ltcrtmebrssrlxen\",\"value\":\"datapthc\"},{\"name\":\"j\",\"value\":\"dataqmb\"},{\"name\":\"xensog\",\"value\":\"datavhqqxggncgyzvt\"},{\"name\":\"excjqrvpgukscrsb\",\"value\":\"datahkdemaxoaj\"}],\"\":{\"ajkkzkzprjqbmgf\":\"datacachsojgagey\",\"zbeemlsrtgbgcmut\":\"datawy\",\"lpuuf\":\"datakwd\"}}]
},{\"value\":\"bdmmfdrxyejjqctq\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hiiatpdxpoxo\",\"description\":\"psmxfchnhjsaq\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"gmlpl\",\"dependencyConditions\":[]},{\"activity\":\"gpskynk\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"kvnlvwtslzblgv\",\"value\":\"datazhi\"}],\"\":{\"oysyutnredzkovtj\":\"datayqwlxk\",\"yblwayupaggkru\":\"datamcaprxhix\",\"yvhcboipxhgh\":\"datapunwy\",\"h\":\"datacw\"}},{\"type\":\"Activity\",\"name\":\"drvjktvpyanooyti\",\"description\":\"m\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"mmoquicr\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"uqacebcnh\",\"value\":\"datahsaumju\"},{\"name\":\"uspflvglvwkgcp\",\"value\":\"dataznekbrq\"},{\"name\":\"wvxwqqmvsrbmf\",\"value\":\"databtmljob\"}],\"\":{\"wnygbralcw\":\"datay\",\"dp\":\"dataebyczwegt\",\"xxvksqifr\":\"dataaz\"}}]},{\"value\":\"idvtur\",\"activities\":[{\"type\":\"Activity\",\"name\":\"ecmegolldlt\",\"description\":\"yhzt\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ejomj\",\"dependencyConditions\":[]},{\"activity\":\"lxjhrzg\",\"dependencyConditions\":[]},{\"activity\":\"fq\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"qezgbqi\",\"value\":\"datawe\"},{\"name\":\"aceokrarzkza\",\"value\":\"dataznvyeuxd\"},{\"name\":\"zomtzpukm\",\"value\":\"datagslzb\"},{\"name\":\"nlfzq\",\"value\":\"datamxuo\"}],\"\":{\"eurbo\":\"dataesejdcpc\"}},{\"type\":\"Activity\",\"name\":\"zm\",\"description\":\"wyjcbjrptlt\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"h\",\"dependencyConditions\":[]},{\"activity\":\"wjlbygqfmeeuuurx\",\"dependencyConditions\":[]},{\"activity\":\"slxzwvygquiwcfq\",\"dependencyConditions\":[]},{\"activity\":\"ob\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"devq\",\"value\":\"datat
ejhvggykirqks\"},{\"name\":\"yya\",\"value\":\"datammim\"}],\"\":{\"qjb\":\"datawcd\",\"lvlfkwdtsbjmc\":\"datarxmlmibvczdjko\"}},{\"type\":\"Activity\",\"name\":\"sefezjyfaqdwfa\",\"description\":\"zdetslxerhw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ge\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"qziljrsycujnszn\",\"value\":\"dataskwjjupukh\"},{\"name\":\"pixuyyqsonfxsf\",\"value\":\"dataedjnxicufxt\"},{\"name\":\"ytedspkduhz\",\"value\":\"datavbgcf\"}],\"\":{\"qpjcuuyttuindp\":\"datae\",\"ahtlopbnsbjzrn\":\"datarijncaqgts\",\"ln\":\"datacagagmgu\"}},{\"type\":\"Activity\",\"name\":\"shnoxrmabb\",\"description\":\"zcdbqzwutakbvaq\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"wpllojdccd\",\"dependencyConditions\":[]},{\"activity\":\"ewtddigmmjve\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"crbkwcnvgx\",\"value\":\"datau\"},{\"name\":\"hmjcemkcwcbvhqj\",\"value\":\"dataiafzwhr\"}],\"\":{\"oafzrqm\":\"datant\",\"rryzbqpksoaxszuh\":\"datagfo\",\"dsdgvhecqkb\":\"dataojjbky\"}}]}],\"defaultActivities\":[{\"type\":\"Activity\",\"name\":\"mamyshn\",\"description\":\"upchzs\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xyqhctrrvuhgch\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\",\"Succeeded\"],\"\":{\"qfqawynsl\":\"dataxrzukajkihn\",\"ovavi\":\"datafx\",\"htfugppiudhylxq\":\"datariedf\"}}],\"userProperties\":[{\"name\":\"mqdri\",\"value\":\"datauxzcrfpkbchnhexm\"}],\"\":{\"pkqhgfw\":\"datalufojue\",\"uyhmltdgxiqrgrr\":\"datazvnsnaklob\",\"ytgnycnklqipnzgn\":\"datajfxueqyjee\",\"xkscyykrzrjjernj\":\"databuglalaazncnhzqn\"}},{\"type\":\"Activity\",\"name\":\"galodfsbhphwt\",\"description\":\"yiajhct\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"jcmnugp\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Skipped\",\"Completed\"],\"\":{\"bhjl
cxvsmrxypbiw\":\"dataafuvbgcyar\",\"b\":\"datayznaixjsfasxfamn\"}}],\"userProperties\":[{\"name\":\"glqyb\",\"value\":\"datanxejxwcojjmp\"}],\"\":{\"uozjgkcxb\":\"datatqc\",\"qvrxouoqtestr\":\"datanwiignr\"}},{\"type\":\"Activity\",\"name\":\"qesk\",\"description\":\"rcyyzaalpwwcwie\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"sndqjbdtczxwqm\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Skipped\",\"Failed\"],\"\":{\"drmqkw\":\"datacu\",\"rygdpjufmvozqmtc\":\"datautbtr\",\"gctsatnrywouewrw\":\"datappj\"}},{\"activity\":\"qrvtwvyjprr\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Failed\"],\"\":{\"scswadvbwewwdf\":\"dataznm\"}},{\"activity\":\"iehwmaxl\",\"dependencyConditions\":[\"Succeeded\",\"Succeeded\",\"Failed\",\"Succeeded\"],\"\":{\"pllodowsjcvpvt\":\"dataeneq\",\"mnpblhalmh\":\"dataullivc\",\"ggiglpg\":\"dataatpwqoqnajmwpeao\"}},{\"activity\":\"hlwulug\",\"dependencyConditions\":[\"Skipped\",\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"tqrpbwykeeocpswq\":\"dataozvixamh\",\"ozpcco\":\"datapkodbquvftka\",\"m\":\"datawfqtqbnakmgyd\"}}],\"userProperties\":[{\"name\":\"wfdgeqzkpergzs\",\"value\":\"datarkkankjk\"},{\"name\":\"zudxqw\",\"value\":\"datavxvoqbruyma\"},{\"name\":\"j\",\"value\":\"datafofxi\"},{\"name\":\"jfmvydjax\",\"value\":\"datastuhlwzcn\"}],\"\":{\"yfhkx\":\"datagbym\",\"ps\":\"dataplhqzpwqpuy\",\"pjqfupoamc\":\"dataueacnfgt\"}},{\"type\":\"Activity\",\"name\":\"dzqxkgr\",\"description\":\"nqipskpynrsacdc\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"hiqodxsscirgqj\",\"dependencyConditions\":[\"Failed\"],\"\":{\"kgbhkvhldn\":\"datahkeyhfnjifuucojk\",\"fzydwexoyfseehvm\":\"datacxwjwsrdzmbz\"}},{\"activity\":\"yubvdoufwkhipa\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Failed\"],\"\":{\"hskhjjxes\":\"databbweaajgokpnb\",\"vi\":\"databuhkcshyhgahmt\"}},{\"activity\":\"eoijeppnpftwgtr\",\"dependencyConditions\":[\"Failed\",\"Succeede
d\",\"Failed\"],\"\":{\"j\":\"datahsnej\",\"tvqrjutyfnmwmgha\":\"datawkwxnmqm\"}},{\"activity\":\"edqakhcc\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"pvwrb\":\"datasvtsjvdvza\"}}],\"userProperties\":[{\"name\":\"xuupqk\",\"value\":\"databemhwtme\"},{\"name\":\"tsfsjpvjwbxlg\",\"value\":\"dataepxbjjnxdgn\"}],\"\":{\"pbbepmmih\":\"datalt\",\"tdzgngnuuz\":\"datavadasuevu\"}}]},\"name\":\"hgfojdbov\",\"description\":\"nelqlqnw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"mnfnrpqsjzgncyks\",\"dependencyConditions\":[\"Failed\",\"Skipped\"],\"\":{\"ibiyl\":\"datawak\",\"edxdemcyrblwqhz\":\"dataf\",\"gs\":\"datar\",\"pw\":\"databzpozqluuaugktt\"}},{\"activity\":\"olajevww\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"ftcnzokdademqp\":\"dataqeguenteucaojjbb\",\"ay\":\"dataxekmdkbtmupm\",\"ibu\":\"datajocsq\",\"kgvwkdgsrtm\":\"datalppnevujkzb\"}}],\"userProperties\":[{\"name\":\"jygnhmoeoxso\",\"value\":\"dataljzodcxyg\"},{\"name\":\"mjf\",\"value\":\"datamtxfaucihqs\"},{\"name\":\"gtqaoacnlyzizw\",\"value\":\"dataqvgpidrtbcxi\"}],\"\":{\"jvmnooag\":\"datadblvbwueytxl\",\"enxmpiqlnwfb\":\"dataqnek\"}}") - .toObject(SwitchActivity.class); - Assertions.assertEquals("hgfojdbov", model.name()); - Assertions.assertEquals("nelqlqnw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("mnfnrpqsjzgncyks", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("jygnhmoeoxso", model.userProperties().get(0).name()); - Assertions.assertEquals("ctpzhoxagayno", model.on().value()); - Assertions.assertEquals("tefevhedfzxsqyp", model.cases().get(0).value()); - Assertions.assertEquals("lrgsfnjokrfpiqgq", 
model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("rlbsglqiuqsqzumx", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("oziqcuiek", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("ylqgenbe", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("mamyshn", model.defaultActivities().get(0).name()); - Assertions.assertEquals("upchzs", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("xyqhctrrvuhgch", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mqdri", model.defaultActivities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SwitchActivity model - = new SwitchActivity().withName("hgfojdbov") - .withDescription("nelqlqnw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mnfnrpqsjzgncyks") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("olajevww") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - 
.withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("jygnhmoeoxso").withValue("dataljzodcxyg"), - new UserProperty().withName("mjf").withValue("datamtxfaucihqs"), - new UserProperty().withName("gtqaoacnlyzizw").withValue("dataqvgpidrtbcxi"))) - .withOn(new Expression().withValue("ctpzhoxagayno")) - .withCases(Arrays.asList( - new SwitchCase().withValue("tefevhedfzxsqyp") - .withActivities(Arrays.asList(new Activity().withName("lrgsfnjokrfpiqgq") - .withDescription("rlbsglqiuqsqzumx") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("oziqcuiek") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yaxpukxt") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("eejxwbredxmdpfx") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kwyqo") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ylqgenbe").withValue("datapaiat"), - new UserProperty().withName("oxvbwsa").withValue("datazvtinrortjtyls"), - new UserProperty().withName("udp").withValue("datahvnngijnzlo"), - new UserProperty().withName("xihf").withValue("datan"))) - .withAdditionalProperties(mapOf("type", "Activity")))), - new SwitchCase().withValue("jvs") - .withActivities(Arrays.asList( - new Activity().withName("wpqvgxpwmoefhbur") - .withDescription("a") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("wpxvptqnqbd") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new 
ActivityDependency().withActivity("w") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("otzlfhnf") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("wmcugveiie").withValue("dataoolblvcalbud"), - new UserProperty().withName("qthohfqbeaizv").withValue("datanhxgiydkrgdascm"), - new UserProperty().withName("nkabwpdvedmx").withValue("datakbgxgykx"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("udnxaw") - .withDescription("mbmbvccuikpaviu") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("uio") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("ltcrtmebrssrlxen").withValue("datapthc"), - new UserProperty().withName("j").withValue("dataqmb"), - new UserProperty().withName("xensog").withValue("datavhqqxggncgyzvt"), - new UserProperty().withName("excjqrvpgukscrsb").withValue("datahkdemaxoaj"))) - .withAdditionalProperties(mapOf("type", "Activity")))), - new SwitchCase().withValue("bdmmfdrxyejjqctq") - .withActivities(Arrays.asList( - new Activity().withName("hiiatpdxpoxo") - .withDescription("psmxfchnhjsaq") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("gmlpl") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("gpskynk") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("kvnlvwtslzblgv").withValue("datazhi"))) - .withAdditionalProperties(mapOf("type", 
"Activity")), - new Activity().withName("drvjktvpyanooyti") - .withDescription("m") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("mmoquicr") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("uqacebcnh").withValue("datahsaumju"), - new UserProperty().withName("uspflvglvwkgcp").withValue("dataznekbrq"), - new UserProperty().withName("wvxwqqmvsrbmf").withValue("databtmljob"))) - .withAdditionalProperties(mapOf("type", "Activity")))), - new SwitchCase().withValue("idvtur") - .withActivities(Arrays.asList( - new Activity().withName("ecmegolldlt") - .withDescription("yhzt") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ejomj") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lxjhrzg") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("fq") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("qezgbqi").withValue("datawe"), - new UserProperty().withName("aceokrarzkza").withValue("dataznvyeuxd"), - new UserProperty().withName("zomtzpukm").withValue("datagslzb"), - new UserProperty().withName("nlfzq").withValue("datamxuo"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("zm") - .withDescription("wyjcbjrptlt") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("h") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - 
new ActivityDependency().withActivity("wjlbygqfmeeuuurx") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("slxzwvygquiwcfq") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ob") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("devq").withValue("datatejhvggykirqks"), - new UserProperty().withName("yya").withValue("datammim"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("sefezjyfaqdwfa") - .withDescription("zdetslxerhw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("ge") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("qziljrsycujnszn").withValue("dataskwjjupukh"), - new UserProperty().withName("pixuyyqsonfxsf").withValue("dataedjnxicufxt"), - new UserProperty().withName("ytedspkduhz").withValue("datavbgcf"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("shnoxrmabb") - .withDescription("zcdbqzwutakbvaq") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("wpllojdccd") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("ewtddigmmjve") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("crbkwcnvgx").withValue("datau"), - new UserProperty().withName("hmjcemkcwcbvhqj").withValue("dataiafzwhr"))) - .withAdditionalProperties(mapOf("type", 
"Activity")))))) - .withDefaultActivities(Arrays.asList(new Activity().withName("mamyshn") - .withDescription("upchzs") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("xyqhctrrvuhgch") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("mqdri").withValue("datauxzcrfpkbchnhexm"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("galodfsbhphwt") - .withDescription("yiajhct") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("jcmnugp") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("glqyb").withValue("datanxejxwcojjmp"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("qesk") - .withDescription("rcyyzaalpwwcwie") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("sndqjbdtczxwqm") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qrvtwvyjprr") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new 
ActivityDependency().withActivity("iehwmaxl") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hlwulug") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("wfdgeqzkpergzs").withValue("datarkkankjk"), - new UserProperty().withName("zudxqw").withValue("datavxvoqbruyma"), - new UserProperty().withName("j").withValue("datafofxi"), - new UserProperty().withName("jfmvydjax").withValue("datastuhlwzcn"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("dzqxkgr") - .withDescription("nqipskpynrsacdc") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("hiqodxsscirgqj") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yubvdoufwkhipa") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("eoijeppnpftwgtr") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("edqakhcc") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new 
UserProperty().withName("xuupqk").withValue("databemhwtme"), - new UserProperty().withName("tsfsjpvjwbxlg").withValue("dataepxbjjnxdgn"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(SwitchActivity.class); - Assertions.assertEquals("hgfojdbov", model.name()); - Assertions.assertEquals("nelqlqnw", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("mnfnrpqsjzgncyks", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("jygnhmoeoxso", model.userProperties().get(0).name()); - Assertions.assertEquals("ctpzhoxagayno", model.on().value()); - Assertions.assertEquals("tefevhedfzxsqyp", model.cases().get(0).value()); - Assertions.assertEquals("lrgsfnjokrfpiqgq", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("rlbsglqiuqsqzumx", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.cases().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("oziqcuiek", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("ylqgenbe", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("mamyshn", model.defaultActivities().get(0).name()); - Assertions.assertEquals("upchzs", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.defaultActivities().get(0).onInactiveMarkAs()); - 
Assertions.assertEquals("xyqhctrrvuhgch", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("mqdri", model.defaultActivities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java deleted file mode 100644 index 749fd6b0a139..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchActivityTypePropertiesTests.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SwitchActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.SwitchCase; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SwitchActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SwitchActivityTypeProperties model = BinaryData.fromString( - 
"{\"on\":{\"value\":\"ohxsmhvj\"},\"cases\":[{\"value\":\"hcpsuf\",\"activities\":[{\"type\":\"Activity\",\"name\":\"cvfxsvxkcyhkhw\",\"description\":\"vwfo\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"enrcovq\",\"dependencyConditions\":[]},{\"activity\":\"yd\",\"dependencyConditions\":[]},{\"activity\":\"yctkrga\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"mrxxmgzsln\",\"value\":\"datacawrhomavvni\"}],\"\":{\"qyetfxyxsk\":\"dataoone\",\"af\":\"datavkdpnqcuprlrz\"}},{\"type\":\"Activity\",\"name\":\"kcueov\",\"description\":\"mzee\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"inifnmcc\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"bytzm\",\"value\":\"dataamesdcm\"}],\"\":{\"hyrh\":\"dataatnf\",\"gyyufhcfeggy\":\"datavaap\",\"bdvazqsbrqspvltu\":\"datauj\",\"ftcinj\":\"datavujuxvllx\"}},{\"type\":\"Activity\",\"name\":\"rayoask\",\"description\":\"lqweo\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"bzawdwtzxqbqzpl\",\"dependencyConditions\":[]},{\"activity\":\"yjk\",\"dependencyConditions\":[]},{\"activity\":\"chpwvhiaxkmditcz\",\"dependencyConditions\":[]},{\"activity\":\"scqobuj\",\"dependencyConditions\":[]}],\"userProperties\":[{\"name\":\"fhlwgka\",\"value\":\"dataxp\"},{\"name\":\"kmbdhcc\",\"value\":\"datajotccbduwswf\"},{\"name\":\"qycubmeih\",\"value\":\"datagmewdmlk\"}],\"\":{\"obui\":\"datahslbiptsfqcw\",\"unjegomegma\":\"datafda\"}}]}],\"defaultActivities\":[{\"type\":\"Activity\",\"name\":\"eablknqnqqcgi\",\"description\":\"fdeogmwlpopjlgt\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"azpoxmx\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"mmved\":\"datacskpgnagncguqfn\",\"tpbezlucxbudaj\":\"datawdez\"}}],\"userProperties\":[{\"name\":\"vvbw\",\"value\":\"dataunr\"},{\"name\":\"myvbiuvxlhf\",\"value\":\"datakllxoahfvkyhfd\"},{\"name\":\"hqzvfzxseqs
coy\",\"value\":\"dataxbaw\"},{\"name\":\"isbhkeskgnj\",\"value\":\"dataavoqcyl\"}],\"\":{\"pxwzewpngcocb\":\"datadmrqra\",\"nt\":\"datahmsjobzvsug\",\"bswb\":\"datalzbwtivgdcr\"}}]}") - .toObject(SwitchActivityTypeProperties.class); - Assertions.assertEquals("ohxsmhvj", model.on().value()); - Assertions.assertEquals("hcpsuf", model.cases().get(0).value()); - Assertions.assertEquals("cvfxsvxkcyhkhw", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("vwfo", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.cases().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, - model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("enrcovq", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("mrxxmgzsln", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("eablknqnqqcgi", model.defaultActivities().get(0).name()); - Assertions.assertEquals("fdeogmwlpopjlgt", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("azpoxmx", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vvbw", model.defaultActivities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SwitchActivityTypeProperties model = new SwitchActivityTypeProperties() - .withOn(new Expression().withValue("ohxsmhvj")) - .withCases(Arrays.asList(new SwitchCase().withValue("hcpsuf") - 
.withActivities(Arrays.asList( - new Activity().withName("cvfxsvxkcyhkhw") - .withDescription("vwfo") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("enrcovq") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yd") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yctkrga") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("mrxxmgzsln").withValue("datacawrhomavvni"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("kcueov") - .withDescription("mzee") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("inifnmcc") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("bytzm").withValue("dataamesdcm"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("rayoask") - .withDescription("lqweo") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("bzawdwtzxqbqzpl") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("yjk") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("chpwvhiaxkmditcz") - .withDependencyConditions(Arrays.asList()) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("scqobuj") - .withDependencyConditions(Arrays.asList()) - 
.withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("fhlwgka").withValue("dataxp"), - new UserProperty().withName("kmbdhcc").withValue("datajotccbduwswf"), - new UserProperty().withName("qycubmeih").withValue("datagmewdmlk"))) - .withAdditionalProperties(mapOf("type", "Activity")))))) - .withDefaultActivities(Arrays.asList(new Activity().withName("eablknqnqqcgi") - .withDescription("fdeogmwlpopjlgt") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("azpoxmx") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("vvbw").withValue("dataunr"), - new UserProperty().withName("myvbiuvxlhf").withValue("datakllxoahfvkyhfd"), - new UserProperty().withName("hqzvfzxseqscoy").withValue("dataxbaw"), - new UserProperty().withName("isbhkeskgnj").withValue("dataavoqcyl"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(SwitchActivityTypeProperties.class); - Assertions.assertEquals("ohxsmhvj", model.on().value()); - Assertions.assertEquals("hcpsuf", model.cases().get(0).value()); - Assertions.assertEquals("cvfxsvxkcyhkhw", model.cases().get(0).activities().get(0).name()); - Assertions.assertEquals("vwfo", model.cases().get(0).activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.cases().get(0).activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, - model.cases().get(0).activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("enrcovq", model.cases().get(0).activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals("mrxxmgzsln", model.cases().get(0).activities().get(0).userProperties().get(0).name()); - Assertions.assertEquals("eablknqnqqcgi", 
model.defaultActivities().get(0).name()); - Assertions.assertEquals("fdeogmwlpopjlgt", model.defaultActivities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.defaultActivities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, - model.defaultActivities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("azpoxmx", model.defaultActivities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.defaultActivities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("vvbw", model.defaultActivities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java deleted file mode 100644 index b35a6982e6b6..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SwitchCaseTests.java +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.SwitchCase; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SwitchCaseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SwitchCase model = BinaryData.fromString( - "{\"value\":\"zmxvd\",\"activities\":[{\"type\":\"Activity\",\"name\":\"wyiko\",\"description\":\"epkknyvn\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"onjdhbq\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Succeeded\",\"Completed\"],\"\":{\"aykzwij\":\"dataf\"}},{\"activity\":\"xwmjlmosqh\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Completed\"],\"\":{\"a\":\"datasqruy\",\"zjzhixciv\":\"dataem\",\"clmk\":\"dataokaujj\",\"ceowvirbs\":\"dataswtkhfc\"}}],\"userProperties\":[{\"name\":\"lkhepnm\",\"value\":\"datagczcpoydaifx\"},{\"name\":\"bxqzczcc\",\"value\":\"datalpigpzpl\"},{\"name\":\"aoiid\",\"value\":\"dataknsqdr\"}],\"\":{\"eeuyxxrwovgwqz\":\"dataanpjvqrwl\",\"zsqbckq\":\"dataugsbwqrotpvyt\",\"vy\":\"datateminzn\",\"vi\":\"datacjzkgy\"}},{\"type\":\"Activity\",\"name\":\"eskindgmk\",\"description\":\"uvyuzzw\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"xrnsyv\",\"dependencyConditions\":[\"Failed\",\"Failed\"],\"\":{\"im\":\"datavcogq\"}},{\"activity\":\"cthrrxrki\",\"dependencyConditions\":[\"Skipped\"]
,\"\":{\"ezufxuugvdbpjo\":\"databmizbev\",\"avlnk\":\"datacpystc\",\"goxfdykezoxh\":\"datamvnvfg\",\"mqmbifpciammpea\":\"dataza\"}},{\"activity\":\"dhebzquqggjxklo\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Failed\",\"Succeeded\"],\"\":{\"nxoxjghumvptb\":\"datajwmisxgji\"}}],\"userProperties\":[{\"name\":\"llvfea\",\"value\":\"datacjuzzzil\"}],\"\":{\"rnxsy\":\"datarnovbgdba\",\"qjinrymzlqircivx\":\"datapilqojdmzejcpz\"}},{\"type\":\"Activity\",\"name\":\"qzmvg\",\"description\":\"tkcvnyik\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"vgxelzuvdyz\",\"dependencyConditions\":[\"Failed\",\"Completed\",\"Succeeded\"],\"\":{\"tcxhpntewvfvs\":\"dataqkklzyhavtivefsr\",\"dervnnfieaqbvg\":\"datadmcoxobrv\",\"ubqemrxmr\":\"dataehggeeagbrslbzc\"}},{\"activity\":\"be\",\"dependencyConditions\":[\"Skipped\",\"Succeeded\"],\"\":{\"vgagdnzvo\":\"datawqnwxohb\",\"zcpifasifdtiocsf\":\"datarnqnurunky\",\"borynmadtyhm\":\"datacyyicascvcmthu\"}}],\"userProperties\":[{\"name\":\"oemhvnqwd\",\"value\":\"datahnc\"}],\"\":{\"oatqnhrhxhmtxp\":\"dataqijeq\"}},{\"type\":\"Activity\",\"name\":\"dtmr\",\"description\":\"kntizn\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"ybbkdhwa\",\"dependencyConditions\":[\"Completed\",\"Failed\"],\"\":{\"qnpnpggbuajw\":\"datarviqrzwslioucae\",\"dnmuirtkqztkx\":\"datargq\",\"iegpdhityt\":\"datahixfuuzaczmejf\"}},{\"activity\":\"etwdskocmqhz\",\"dependencyConditions\":[\"Failed\",\"Skipped\",\"Succeeded\",\"Failed\"],\"\":{\"cyngdgka\":\"datajgfxvjqevmzhk\",\"hrlb\":\"datanxy\"}},{\"activity\":\"hd\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"h\":\"datacbcxbiis\",\"uuetmqzuen\":\"dataqqaedgwghqq\",\"mj\":\"datallqvroopk\"}},{\"activity\":\"opibaxkywqs\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"inoorabspf\":\"datafzdkpfeupacahlsa\",\"jmwq\":\"datany\",\"bdzdh\":\"datamhfv\"}}],\"userProperties\":[{\"name\":\"fidro
pfpuc\",\"value\":\"datamdmbysvqbgndfzh\"},{\"name\":\"yxccxeu\",\"value\":\"dataioawrorexicwb\"},{\"name\":\"uppkzuxsbbmxfut\",\"value\":\"datay\"},{\"name\":\"helyopobg\",\"value\":\"datal\"}],\"\":{\"tqhghygzayazr\":\"dataiwjezadkfmpiff\"}}]}") - .toObject(SwitchCase.class); - Assertions.assertEquals("zmxvd", model.value()); - Assertions.assertEquals("wyiko", model.activities().get(0).name()); - Assertions.assertEquals("epkknyvn", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("onjdhbq", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lkhepnm", model.activities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SwitchCase model = new SwitchCase().withValue("zmxvd") - .withActivities(Arrays.asList( - new Activity().withName("wyiko") - .withDescription("epkknyvn") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList(new ActivityDependency().withActivity("onjdhbq") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xwmjlmosqh") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("lkhepnm").withValue("datagczcpoydaifx"), - new 
UserProperty().withName("bxqzczcc").withValue("datalpigpzpl"), - new UserProperty().withName("aoiid").withValue("dataknsqdr"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("eskindgmk") - .withDescription("uvyuzzw") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("xrnsyv") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("cthrrxrki") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dhebzquqggjxklo") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.FAILED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("llvfea").withValue("datacjuzzzil"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("qzmvg") - .withDescription("tkcvnyik") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("vgxelzuvdyz") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("be") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("oemhvnqwd").withValue("datahnc"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("dtmr") - .withDescription("kntizn") - 
.withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ybbkdhwa") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("etwdskocmqhz") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hd") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("opibaxkywqs") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("fidropfpuc").withValue("datamdmbysvqbgndfzh"), - new UserProperty().withName("yxccxeu").withValue("dataioawrorexicwb"), - new UserProperty().withName("uppkzuxsbbmxfut").withValue("datay"), - new UserProperty().withName("helyopobg").withValue("datal"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(SwitchCase.class); - Assertions.assertEquals("zmxvd", model.value()); - Assertions.assertEquals("wyiko", model.activities().get(0).name()); - Assertions.assertEquals("epkknyvn", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("onjdhbq", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.FAILED, - 
model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("lkhepnm", model.activities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java deleted file mode 100644 index a081dcabb70f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SybaseSource; - -public final class SybaseSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SybaseSource model = BinaryData.fromString( - "{\"type\":\"SybaseSource\",\"query\":\"datar\",\"queryTimeout\":\"datah\",\"additionalColumns\":\"dataqfl\",\"sourceRetryCount\":\"dataqcxyiqppa\",\"sourceRetryWait\":\"dataiqrlla\",\"maxConcurrentConnections\":\"datalbtkx\",\"disableMetricsCollection\":\"datajzgnla\",\"\":{\"wbobawlntenhnq\":\"datatexaugojvgjez\",\"qor\":\"datavxghbehheho\",\"yugzlvgjirjkkrs\":\"datavwlceoj\"}}") - .toObject(SybaseSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SybaseSource model = new SybaseSource().withSourceRetryCount("dataqcxyiqppa") - .withSourceRetryWait("dataiqrlla") - .withMaxConcurrentConnections("datalbtkx") - .withDisableMetricsCollection("datajzgnla") - .withQueryTimeout("datah") - .withAdditionalColumns("dataqfl") - .withQuery("datar"); - model = BinaryData.fromObject(model).toObject(SybaseSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java deleted file mode 100644 index c5446606fef7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTests.java +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.SybaseTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SybaseTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SybaseTableDataset model = BinaryData.fromString( - "{\"type\":\"SybaseTable\",\"typeProperties\":{\"tableName\":\"datatbl\"},\"description\":\"vrj\",\"structure\":\"datanotdofqvpbqsdqk\",\"schema\":\"databqsbbmitaf\",\"linkedServiceName\":{\"referenceName\":\"azgcxsvqlcquf\",\"parameters\":{\"gc\":\"datamxow\"}},\"parameters\":{\"quksx\":{\"type\":\"Bool\",\"defaultValue\":\"dataehlkarvti\"},\"ggvmfnnbbx\":{\"type\":\"Object\",\"defaultValue\":\"datasgofunswhpce\"},\"deetxtpwcvgifws\":{\"type\":\"Array\",\"defaultValue\":\"datafkk\"},\"onbz\":{\"type\":\"String\",\"defaultValue\":\"datazijaciwmmpdtq\"}},\"annotations\":[\"datafzyviiwsua\",\"datazhw\",\"datayui\",\"datakzqqhbtfloilmkf\"],\"folder\":{\"name\":\"iipj\"},\"\":{\"tug\":\"datavyvuoikdlpsx\",\"swxvjelei\":\"datawimqnryclocfm\",\"aevyk\":\"dataqhdxtwwulkryb\"}}") - .toObject(SybaseTableDataset.class); - Assertions.assertEquals("vrj", model.description()); - Assertions.assertEquals("azgcxsvqlcquf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("quksx").type()); - Assertions.assertEquals("iipj", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SybaseTableDataset model = new 
SybaseTableDataset().withDescription("vrj") - .withStructure("datanotdofqvpbqsdqk") - .withSchema("databqsbbmitaf") - .withLinkedServiceName( - new LinkedServiceReference().withReferenceName("azgcxsvqlcquf").withParameters(mapOf("gc", "datamxow"))) - .withParameters(mapOf("quksx", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataehlkarvti"), - "ggvmfnnbbx", - new ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datasgofunswhpce"), - "deetxtpwcvgifws", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datafkk"), "onbz", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datazijaciwmmpdtq"))) - .withAnnotations(Arrays.asList("datafzyviiwsua", "datazhw", "datayui", "datakzqqhbtfloilmkf")) - .withFolder(new DatasetFolder().withName("iipj")) - .withTableName("datatbl"); - model = BinaryData.fromObject(model).toObject(SybaseTableDataset.class); - Assertions.assertEquals("vrj", model.description()); - Assertions.assertEquals("azgcxsvqlcquf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.BOOL, model.parameters().get("quksx").type()); - Assertions.assertEquals("iipj", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java deleted file mode 100644 index f147ca3a3e84..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SybaseTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SybaseTableDatasetTypeProperties; - -public final class SybaseTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SybaseTableDatasetTypeProperties model - = BinaryData.fromString("{\"tableName\":\"datajec\"}").toObject(SybaseTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SybaseTableDatasetTypeProperties model = new SybaseTableDatasetTypeProperties().withTableName("datajec"); - model = BinaryData.fromObject(model).toObject(SybaseTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java deleted file mode 100644 index ceb5465a0b17..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTests.java +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityPolicy; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; -import com.azure.resourcemanager.datafactory.models.BigDataPoolReferenceType; -import com.azure.resourcemanager.datafactory.models.ConfigurationType; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.NotebookParameter; -import com.azure.resourcemanager.datafactory.models.NotebookParameterType; -import com.azure.resourcemanager.datafactory.models.NotebookReferenceType; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType; -import com.azure.resourcemanager.datafactory.models.SynapseNotebookActivity; -import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import 
java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SynapseNotebookActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SynapseNotebookActivity model = BinaryData.fromString( - "{\"type\":\"SynapseNotebook\",\"typeProperties\":{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"datalfetlmmdgeb\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"dataxattthazqdweryzg\"},\"parameters\":{\"ifshsbtpldzi\":{\"value\":\"datandolezbyaszuo\",\"type\":\"int\"}},\"executorSize\":\"datatcnqkwghdaebya\",\"conf\":\"datakfapzf\",\"driverSize\":\"datavkorsrpmrhekxm\",\"numExecutors\":\"dataiq\",\"configurationType\":\"Artifact\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"datawbdk\"},\"sparkConfig\":{\"kwjmsogzc\":\"datadaehpfrexb\",\"j\":\"dataz\",\"lrzvjvlnafpfou\":\"datawqiawjevdnpkdmq\"}},\"linkedServiceName\":{\"referenceName\":\"aqba\",\"parameters\":{\"urxwtfpeuftpjldl\":\"datajwcdjxqxfvk\",\"mmbxkbtberyql\":\"dataforvsypjytgz\",\"gm\":\"datazebmikojpggwuwa\"}},\"policy\":{\"timeout\":\"datagcnllqfbeuugir\",\"retry\":\"dataezpsydkgtdwlvsf\",\"retryIntervalInSeconds\":1265498269,\"secureInput\":true,\"secureOutput\":true,\"\":{\"mjgbz\":\"datazdcthhe\",\"wvdwmuytkkfoton\":\"datadonyleis\"}},\"name\":\"r\",\"description\":\"ozu\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"kyzyi\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Failed\"],\"\":{\"drpwck\":\"datafqkeaipbkenqc\",\"tlbaxiaerpoasy\":\"datavtknu\"}},{\"activity\":\"zebbrqnnrdb\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Skipped\"],\"\":{\"tmqymgiy\":\"dataniwk\",\"pivsowcwehjqy\":\"datagee\",\"nxcayyvri\":\"datallepppdfrgobr\"}},{\"activity\":\"vmmef\",\"dependencyConditions\":[\"Failed\",\"Succeeded\",\"Succeeded\",\"Failed\"],\"\":{\"vriglutxzasc\":\"dataixczxk\"}},{\"activi
ty\":\"lwfefygnafpi\",\"dependencyConditions\":[\"Succeeded\",\"Completed\"],\"\":{\"aypsvedxphf\":\"datau\",\"qqwxjnkbes\":\"datao\",\"qoujhmdpe\":\"datammitvviqs\"}}],\"userProperties\":[{\"name\":\"qwjqevwtkrjqnciw\",\"value\":\"databjwngrrpdt\"},{\"name\":\"nhctkgllmpku\",\"value\":\"datablucxyhtkyq\"},{\"name\":\"ynvtimpgusroqk\",\"value\":\"datawkvojr\"}],\"\":{\"hkplxbpxzdu\":\"datajtjqhfkwsmgkomqf\"}}") - .toObject(SynapseNotebookActivity.class); - Assertions.assertEquals("r", model.name()); - Assertions.assertEquals("ozu", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("kyzyi", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("qwjqevwtkrjqnciw", model.userProperties().get(0).name()); - Assertions.assertEquals("aqba", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1265498269, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("ifshsbtpldzi").type()); - Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, - model.targetSparkConfiguration().type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SynapseNotebookActivity model = new SynapseNotebookActivity().withName("r") - .withDescription("ozu") - 
.withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("kyzyi") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("zebbrqnnrdb") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vmmef") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, DependencyCondition.SUCCEEDED, - DependencyCondition.SUCCEEDED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lwfefygnafpi") - .withDependencyConditions( - Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("qwjqevwtkrjqnciw").withValue("databjwngrrpdt"), - new UserProperty().withName("nhctkgllmpku").withValue("datablucxyhtkyq"), - new UserProperty().withName("ynvtimpgusroqk").withValue("datawkvojr"))) - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("aqba") - .withParameters(mapOf("urxwtfpeuftpjldl", "datajwcdjxqxfvk", "mmbxkbtberyql", "dataforvsypjytgz", "gm", - "datazebmikojpggwuwa"))) - .withPolicy(new ActivityPolicy().withTimeout("datagcnllqfbeuugir") - .withRetry("dataezpsydkgtdwlvsf") - .withRetryIntervalInSeconds(1265498269) - .withSecureInput(true) - .withSecureOutput(true) - .withAdditionalProperties(mapOf())) - .withNotebook(new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) - .withReferenceName("datalfetlmmdgeb")) - .withSparkPool( - new 
BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) - .withReferenceName("dataxattthazqdweryzg")) - .withParameters(mapOf("ifshsbtpldzi", - new NotebookParameter().withValue("datandolezbyaszuo").withType(NotebookParameterType.INT))) - .withExecutorSize("datatcnqkwghdaebya") - .withConf("datakfapzf") - .withDriverSize("datavkorsrpmrhekxm") - .withNumExecutors("dataiq") - .withConfigurationType(ConfigurationType.ARTIFACT) - .withTargetSparkConfiguration(new SparkConfigurationParametrizationReference() - .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) - .withReferenceName("datawbdk")) - .withSparkConfig( - mapOf("kwjmsogzc", "datadaehpfrexb", "j", "dataz", "lrzvjvlnafpfou", "datawqiawjevdnpkdmq")); - model = BinaryData.fromObject(model).toObject(SynapseNotebookActivity.class); - Assertions.assertEquals("r", model.name()); - Assertions.assertEquals("ozu", model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SUCCEEDED, model.onInactiveMarkAs()); - Assertions.assertEquals("kyzyi", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("qwjqevwtkrjqnciw", model.userProperties().get(0).name()); - Assertions.assertEquals("aqba", model.linkedServiceName().referenceName()); - Assertions.assertEquals(1265498269, model.policy().retryIntervalInSeconds()); - Assertions.assertEquals(true, model.policy().secureInput()); - Assertions.assertEquals(true, model.policy().secureOutput()); - Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.INT, model.parameters().get("ifshsbtpldzi").type()); - 
Assertions.assertEquals(ConfigurationType.ARTIFACT, model.configurationType()); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, - model.targetSparkConfiguration().type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java deleted file mode 100644 index 6bbc8dbbcc4d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookActivityTypePropertiesTests.java +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.SynapseNotebookActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; -import com.azure.resourcemanager.datafactory.models.BigDataPoolReferenceType; -import com.azure.resourcemanager.datafactory.models.ConfigurationType; -import com.azure.resourcemanager.datafactory.models.NotebookParameter; -import com.azure.resourcemanager.datafactory.models.NotebookParameterType; -import com.azure.resourcemanager.datafactory.models.NotebookReferenceType; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationParametrizationReference; -import com.azure.resourcemanager.datafactory.models.SparkConfigurationReferenceType; -import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class SynapseNotebookActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SynapseNotebookActivityTypeProperties model = BinaryData.fromString( - 
"{\"notebook\":{\"type\":\"NotebookReference\",\"referenceName\":\"datazdpgtbytibpg\"},\"sparkPool\":{\"type\":\"BigDataPoolReference\",\"referenceName\":\"dataiujfputci\"},\"parameters\":{\"ou\":{\"value\":\"datapksjwaglhwnnfgy\",\"type\":\"float\"},\"mfqozvfeljytshj\":{\"value\":\"datamw\",\"type\":\"bool\"},\"goujsvhezhezy\":{\"value\":\"datao\",\"type\":\"int\"},\"yzjzeylthdr\":{\"value\":\"dataofayyshfv\",\"type\":\"float\"}},\"executorSize\":\"dataeidblred\",\"conf\":\"datacckticwg\",\"driverSize\":\"datavqybvgceb\",\"numExecutors\":\"datask\",\"configurationType\":\"Customized\",\"targetSparkConfiguration\":{\"type\":\"SparkConfigurationReference\",\"referenceName\":\"datatlzomsqebmfo\"},\"sparkConfig\":{\"eozgjtuhdgmshuyq\":\"datayfuliatbosnla\",\"ptoentuve\":\"datahbpr\",\"xwrets\":\"datamtlfbzlziduq\"}}") - .toObject(SynapseNotebookActivityTypeProperties.class); - Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.FLOAT, model.parameters().get("ou").type()); - Assertions.assertEquals(ConfigurationType.CUSTOMIZED, model.configurationType()); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, - model.targetSparkConfiguration().type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SynapseNotebookActivityTypeProperties model = new SynapseNotebookActivityTypeProperties() - .withNotebook(new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) - .withReferenceName("datazdpgtbytibpg")) - .withSparkPool( - new BigDataPoolParametrizationReference().withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) - .withReferenceName("dataiujfputci")) - .withParameters(mapOf("ou", - new NotebookParameter().withValue("datapksjwaglhwnnfgy").withType(NotebookParameterType.FLOAT), - 
"mfqozvfeljytshj", new NotebookParameter().withValue("datamw").withType(NotebookParameterType.BOOL), - "goujsvhezhezy", new NotebookParameter().withValue("datao").withType(NotebookParameterType.INT), - "yzjzeylthdr", - new NotebookParameter().withValue("dataofayyshfv").withType(NotebookParameterType.FLOAT))) - .withExecutorSize("dataeidblred") - .withConf("datacckticwg") - .withDriverSize("datavqybvgceb") - .withNumExecutors("datask") - .withConfigurationType(ConfigurationType.CUSTOMIZED) - .withTargetSparkConfiguration(new SparkConfigurationParametrizationReference() - .withType(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE) - .withReferenceName("datatlzomsqebmfo")) - .withSparkConfig( - mapOf("eozgjtuhdgmshuyq", "datayfuliatbosnla", "ptoentuve", "datahbpr", "xwrets", "datamtlfbzlziduq")); - model = BinaryData.fromObject(model).toObject(SynapseNotebookActivityTypeProperties.class); - Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.notebook().type()); - Assertions.assertEquals(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE, model.sparkPool().type()); - Assertions.assertEquals(NotebookParameterType.FLOAT, model.parameters().get("ou").type()); - Assertions.assertEquals(ConfigurationType.CUSTOMIZED, model.configurationType()); - Assertions.assertEquals(SparkConfigurationReferenceType.SPARK_CONFIGURATION_REFERENCE, - model.targetSparkConfiguration().type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java deleted file mode 100644 index ea2edbbefe7e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseNotebookReferenceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.NotebookReferenceType; -import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference; -import org.junit.jupiter.api.Assertions; - -public final class SynapseNotebookReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SynapseNotebookReference model - = BinaryData.fromString("{\"type\":\"NotebookReference\",\"referenceName\":\"dataetrqudxzrbgqt\"}") - .toObject(SynapseNotebookReference.class); - Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SynapseNotebookReference model - = new SynapseNotebookReference().withType(NotebookReferenceType.NOTEBOOK_REFERENCE) - .withReferenceName("dataetrqudxzrbgqt"); - model = BinaryData.fromObject(model).toObject(SynapseNotebookReference.class); - 
Assertions.assertEquals(NotebookReferenceType.NOTEBOOK_REFERENCE, model.type()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java deleted file mode 100644 index ce278b42cc27..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/SynapseSparkJobReferenceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SparkJobReferenceType; -import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference; -import org.junit.jupiter.api.Assertions; - -public final class SynapseSparkJobReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SynapseSparkJobReference model - = BinaryData.fromString("{\"type\":\"SparkJobDefinitionReference\",\"referenceName\":\"dataxqryy\"}") - .toObject(SynapseSparkJobReference.class); - Assertions.assertEquals(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SynapseSparkJobReference model - = new SynapseSparkJobReference().withType(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE) - .withReferenceName("dataxqryy"); - model = BinaryData.fromObject(model).toObject(SynapseSparkJobReference.class); - Assertions.assertEquals(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE, model.type()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java deleted file mode 100644 index af8f880f9750..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularSourceTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TabularSource; - -public final class TabularSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TabularSource model = BinaryData.fromString( - "{\"type\":\"TabularSource\",\"queryTimeout\":\"datayhjhrkfptiiommi\",\"additionalColumns\":\"datafmbvmajcmpohjdvf\",\"sourceRetryCount\":\"datab\",\"sourceRetryWait\":\"datawymahboi\",\"maxConcurrentConnections\":\"dataiuyqdjkugcjssqpk\",\"disableMetricsCollection\":\"dataryhvshkvupbzqwwt\",\"\":{\"vbjvvcogupsho\":\"datayiwuvery\",\"yyvdjoorbuuhbcc\":\"datafas\"}}") - .toObject(TabularSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TabularSource model = new TabularSource().withSourceRetryCount("datab") - .withSourceRetryWait("datawymahboi") - .withMaxConcurrentConnections("dataiuyqdjkugcjssqpk") - .withDisableMetricsCollection("dataryhvshkvupbzqwwt") - .withQueryTimeout("datayhjhrkfptiiommi") - .withAdditionalColumns("datafmbvmajcmpohjdvf"); - model = BinaryData.fromObject(model).toObject(TabularSource.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java deleted file mode 100644 index 91cf909bff02..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TabularTranslatorTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TabularTranslator; -import com.azure.resourcemanager.datafactory.models.TypeConversionSettings; - -public final class TabularTranslatorTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TabularTranslator model = BinaryData.fromString( - "{\"type\":\"TabularTranslator\",\"columnMappings\":\"dataeib\",\"schemaMapping\":\"datawikmgwxysut\",\"collectionReference\":\"datafdhrifekstrms\",\"mapComplexValuesToString\":\"datadgrzkeuplorn\",\"mappings\":\"datassmaaxz\",\"typeConversion\":\"datalnvupi\",\"typeConversionSettings\":{\"allowDataTruncation\":\"datazyhtbjyycacoelvo\",\"treatBooleanAsNumber\":\"datatm\",\"dateTimeFormat\":\"dataalqqrymjwwo\",\"dateTimeOffsetFormat\":\"datanefellhdsgogdu\",\"timeSpanFormat\":\"datamalthcbvuvwdp\",\"culture\":\"dataphnag\"},\"\":{\"ml\":\"dataaxjmnbm\",\"vlrsfmtrmod\":\"dataqatswvtddpicwnb\",\"pqrke\":\"datanxerkaiikbpfaq\",\"uaez\":\"datah\"}}") - .toObject(TabularTranslator.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TabularTranslator model = new TabularTranslator().withColumnMappings("dataeib") - 
.withSchemaMapping("datawikmgwxysut") - .withCollectionReference("datafdhrifekstrms") - .withMapComplexValuesToString("datadgrzkeuplorn") - .withMappings("datassmaaxz") - .withTypeConversion("datalnvupi") - .withTypeConversionSettings(new TypeConversionSettings().withAllowDataTruncation("datazyhtbjyycacoelvo") - .withTreatBooleanAsNumber("datatm") - .withDateTimeFormat("dataalqqrymjwwo") - .withDateTimeOffsetFormat("datanefellhdsgogdu") - .withTimeSpanFormat("datamalthcbvuvwdp") - .withCulture("dataphnag")); - model = BinaryData.fromObject(model).toObject(TabularTranslator.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java deleted file mode 100644 index 741aa91a9633..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarGZipReadSettingsTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TarGZipReadSettings; - -public final class TarGZipReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TarGZipReadSettings model = BinaryData.fromString( - "{\"type\":\"TarGZipReadSettings\",\"preserveCompressionFileNameAsFolder\":\"datazyzbv\",\"\":{\"efmpzdwer\":\"datathsysefilncyqnkp\",\"mndzbfoia\":\"datackzxdlupgtp\",\"tjwsdxyzgrrllz\":\"datapglntnsiuxy\",\"kobxvexusjfjuphj\":\"dataxgomhenqnovt\"}}") - .toObject(TarGZipReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TarGZipReadSettings model = new TarGZipReadSettings().withPreserveCompressionFileNameAsFolder("datazyzbv"); - model = BinaryData.fromObject(model).toObject(TarGZipReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java deleted file mode 100644 index 0b3759e77c48..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TarReadSettingsTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TarReadSettings; - -public final class TarReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TarReadSettings model = BinaryData.fromString( - "{\"type\":\"TarReadSettings\",\"preserveCompressionFileNameAsFolder\":\"datacpbmyghq\",\"\":{\"wkqztqrnreyj\":\"datasbb\",\"gvsjfpsyqvgaaym\":\"datamnrweevtunjdcl\",\"m\":\"datakexhi\",\"zkktcrcctt\":\"dataljhkgxcewzgo\"}}") - .toObject(TarReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TarReadSettings model = new TarReadSettings().withPreserveCompressionFileNameAsFolder("datacpbmyghq"); - model = BinaryData.fromObject(model).toObject(TarReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java deleted file mode 100644 index 4a50952c1253..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataPartitionSettingsTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TeradataPartitionSettings; - -public final class TeradataPartitionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TeradataPartitionSettings model = BinaryData.fromString( - "{\"partitionColumnName\":\"dataift\",\"partitionUpperBound\":\"dataupuukpsw\",\"partitionLowerBound\":\"datatduchcfndijz\"}") - .toObject(TeradataPartitionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TeradataPartitionSettings model = new TeradataPartitionSettings().withPartitionColumnName("dataift") - .withPartitionUpperBound("dataupuukpsw") - .withPartitionLowerBound("datatduchcfndijz"); - model = BinaryData.fromObject(model).toObject(TeradataPartitionSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java deleted file mode 100644 index 12b62102c877..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataSourceTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TeradataPartitionSettings; -import com.azure.resourcemanager.datafactory.models.TeradataSource; - -public final class TeradataSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TeradataSource model = BinaryData.fromString( - "{\"type\":\"TeradataSource\",\"query\":\"datapdnnsujx\",\"partitionOption\":\"dataeqljzkhncaeyk\",\"partitionSettings\":{\"partitionColumnName\":\"datatztnprns\",\"partitionUpperBound\":\"dataniahvlz\",\"partitionLowerBound\":\"dataqwiubgbltjyisj\"},\"queryTimeout\":\"datauwyluktz\",\"additionalColumns\":\"datax\",\"sourceRetryCount\":\"dataa\",\"sourceRetryWait\":\"dataheguxrziryxrpjr\",\"maxConcurrentConnections\":\"datamxqvv\",\"disableMetricsCollection\":\"dataofts\",\"\":{\"vjemp\":\"datawusfbrnjvzly\",\"wzntbi\":\"dataubs\",\"fg\":\"datau\"}}") - .toObject(TeradataSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TeradataSource model = new TeradataSource().withSourceRetryCount("dataa") - .withSourceRetryWait("dataheguxrziryxrpjr") - .withMaxConcurrentConnections("datamxqvv") - .withDisableMetricsCollection("dataofts") - .withQueryTimeout("datauwyluktz") - .withAdditionalColumns("datax") - .withQuery("datapdnnsujx") - .withPartitionOption("dataeqljzkhncaeyk") - .withPartitionSettings(new TeradataPartitionSettings().withPartitionColumnName("datatztnprns") - .withPartitionUpperBound("dataniahvlz") - .withPartitionLowerBound("dataqwiubgbltjyisj")); - model = BinaryData.fromObject(model).toObject(TeradataSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java deleted file mode 100644 index abf09177a843..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.TeradataTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TeradataTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TeradataTableDataset model = BinaryData.fromString( - "{\"type\":\"TeradataTable\",\"typeProperties\":{\"database\":\"datamgfjculojhhylxd\",\"table\":\"datafiyymotuzb\"},\"description\":\"wjmtftcvelniru\",\"structure\":\"dataqnstpaqpibjg\",\"schema\":\"dataswmehfxrttxb\",\"linkedServiceName\":{\"referenceName\":\"sennqfabq\",\"parameters\":{\"xmsynbkd\":\"dataalectcxsfmbz\",\"brnxhjtlxfikj\":\"datanyufxuzmsvzyq\"}},\"parameters\":{\"htrxbozp\":{\"type\":\"String\",\"defaultValue\":\"datavwwuasnjeg\"}},\"annotations\":[\"datanwjzbqblxrnwvdsv\",\"dataqizawwsds\",\"datargf\"],\"folder\":{\"name\":\"saw\"},\"\":{\"kmwzrdqyoy\":\"datauffhxf\"}}") - .toObject(TeradataTableDataset.class); - Assertions.assertEquals("wjmtftcvelniru", 
model.description()); - Assertions.assertEquals("sennqfabq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("htrxbozp").type()); - Assertions.assertEquals("saw", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TeradataTableDataset model = new TeradataTableDataset().withDescription("wjmtftcvelniru") - .withStructure("dataqnstpaqpibjg") - .withSchema("dataswmehfxrttxb") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("sennqfabq") - .withParameters(mapOf("xmsynbkd", "dataalectcxsfmbz", "brnxhjtlxfikj", "datanyufxuzmsvzyq"))) - .withParameters(mapOf("htrxbozp", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datavwwuasnjeg"))) - .withAnnotations(Arrays.asList("datanwjzbqblxrnwvdsv", "dataqizawwsds", "datargf")) - .withFolder(new DatasetFolder().withName("saw")) - .withDatabase("datamgfjculojhhylxd") - .withTable("datafiyymotuzb"); - model = BinaryData.fromObject(model).toObject(TeradataTableDataset.class); - Assertions.assertEquals("wjmtftcvelniru", model.description()); - Assertions.assertEquals("sennqfabq", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("htrxbozp").type()); - Assertions.assertEquals("saw", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java deleted file mode 100644 index 7ddd643e3e1f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TeradataTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.TeradataTableDatasetTypeProperties; - -public final class TeradataTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TeradataTableDatasetTypeProperties model - = BinaryData.fromString("{\"database\":\"datarn\",\"table\":\"dataocjnzdaiovrbhr\"}") - .toObject(TeradataTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TeradataTableDatasetTypeProperties model - = new TeradataTableDatasetTypeProperties().withDatabase("datarn").withTable("dataocjnzdaiovrbhr"); - model = BinaryData.fromObject(model).toObject(TeradataTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java deleted file mode 100644 index 814b5913f4a4..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TextFormatTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TextFormat; - -public final class TextFormatTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TextFormat model = BinaryData.fromString( - "{\"type\":\"TextFormat\",\"columnDelimiter\":\"datadpgzvzqazv\",\"rowDelimiter\":\"dataarkptgongruatsyi\",\"escapeChar\":\"datajqhenigb\",\"quoteChar\":\"datangu\",\"nullValue\":\"databyjdeayscse\",\"encodingName\":\"datazjemexmnvkvm\",\"treatEmptyAsNull\":\"datarxl\",\"skipLineCount\":\"datawmcpmrrdlhvdvm\",\"firstRowAsHeader\":\"datahbeae\",\"serializer\":\"datazmhkdclacroczfmu\",\"deserializer\":\"datarkeluxzshxzezbzu\",\"\":{\"vzske\":\"datal\",\"yu\":\"datacgwfsgqkstyecu\",\"sjcfma\":\"datajparda\"}}") - .toObject(TextFormat.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TextFormat model = new TextFormat().withSerializer("datazmhkdclacroczfmu") - .withDeserializer("datarkeluxzshxzezbzu") - .withColumnDelimiter("datadpgzvzqazv") - .withRowDelimiter("dataarkptgongruatsyi") - .withEscapeChar("datajqhenigb") - .withQuoteChar("datangu") - .withNullValue("databyjdeayscse") - .withEncodingName("datazjemexmnvkvm") - .withTreatEmptyAsNull("datarxl") - .withSkipLineCount("datawmcpmrrdlhvdvm") - .withFirstRowAsHeader("datahbeae"); - model = 
BinaryData.fromObject(model).toObject(TextFormat.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java deleted file mode 100644 index bdd6fb9b5666..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TransformationTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.Transformation; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TransformationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Transformation model = BinaryData.fromString( - "{\"name\":\"a\",\"description\":\"mhudfjecehokwcp\",\"dataset\":{\"referenceName\":\"wloesqrggvrbn\",\"parameters\":{\"uwj\":\"datakoilaci\",\"pjlh\":\"datae\"}},\"linkedService\":{\"referenceName\":\"xpzruzythqkk\",\"parameters\":{\"lnx\":\"datagxvellv\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"itmujdtvm\",\"datasetParameters\":\"datayymffhmjp\",\"parameters\":{\"zuvrzmzqmz\":\"datayx\"},\"\":{\"vnmdyfoeboj\":\"databr\"}}}") - .toObject(Transformation.class); - Assertions.assertEquals("a", 
model.name()); - Assertions.assertEquals("mhudfjecehokwcp", model.description()); - Assertions.assertEquals("wloesqrggvrbn", model.dataset().referenceName()); - Assertions.assertEquals("xpzruzythqkk", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("itmujdtvm", model.flowlet().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Transformation model = new Transformation().withName("a") - .withDescription("mhudfjecehokwcp") - .withDataset(new DatasetReference().withReferenceName("wloesqrggvrbn") - .withParameters(mapOf("uwj", "datakoilaci", "pjlh", "datae"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("xpzruzythqkk") - .withParameters(mapOf("lnx", "datagxvellv"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("itmujdtvm") - .withDatasetParameters("datayymffhmjp") - .withParameters(mapOf("zuvrzmzqmz", "datayx")) - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(Transformation.class); - Assertions.assertEquals("a", model.name()); - Assertions.assertEquals("mhudfjecehokwcp", model.description()); - Assertions.assertEquals("wloesqrggvrbn", model.dataset().referenceName()); - Assertions.assertEquals("xpzruzythqkk", model.linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.flowlet().type()); - Assertions.assertEquals("itmujdtvm", model.flowlet().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java deleted file mode 100644 index ddb923ea7b02..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerDependencyReferenceTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TriggerDependencyReference; -import com.azure.resourcemanager.datafactory.models.TriggerReference; -import com.azure.resourcemanager.datafactory.models.TriggerReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class TriggerDependencyReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerDependencyReference model = BinaryData.fromString( - "{\"type\":\"TriggerDependencyReference\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"sjaaedsqf\"}}") - .toObject(TriggerDependencyReference.class); - Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("sjaaedsqf", model.referenceTrigger().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerDependencyReference model = new 
TriggerDependencyReference().withReferenceTrigger( - new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("sjaaedsqf")); - model = BinaryData.fromObject(model).toObject(TriggerDependencyReference.class); - Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("sjaaedsqf", model.referenceTrigger().referenceName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java deleted file mode 100644 index 415808d2d95d..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerListResponseTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.TriggerResourceInner; -import com.azure.resourcemanager.datafactory.models.Trigger; -import com.azure.resourcemanager.datafactory.models.TriggerListResponse; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TriggerListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"type\":\"Trigger\",\"description\":\"nifmzzsdymbrnysu\",\"runtimeState\":\"Stopped\",\"annotations\":[\"datafwgckhocxvdfffw\"],\"\":{\"spave\":\"dataroud\",\"bunzozudh\":\"datahrv\"}},\"name\":\"gkmoyxcdyuibhmfd\",\"type\":\"zydvfvf\",\"etag\":\"naeo\",\"id\":\"rvhmgor\"},{\"properties\":{\"type\":\"Trigger\",\"description\":\"ukiscvwmzhw\",\"runtimeState\":\"Disabled\",\"annotations\":[\"dataxvxilcbtg\"],\"\":{\"vodggxdbee\":\"datazeyqxtjjfzqlqhyc\",\"wiuagydwqf\":\"datamieknlraria\",\"ocqwogfnzjvus\":\"dataylyrfgiagtco\"}},\"name\":\"ld\",\"type\":\"zuxylfsbtkadpyso\",\"etag\":\"btgkbugrjqctoj\",\"id\":\"isofieypefojyqd\"}],\"nextLink\":\"u\"}") - .toObject(TriggerListResponse.class); - Assertions.assertEquals("rvhmgor", model.value().get(0).id()); - Assertions.assertEquals("nifmzzsdymbrnysu", model.value().get(0).properties().description()); - Assertions.assertEquals("u", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerListResponse model = new TriggerListResponse() - .withValue(Arrays.asList( - new TriggerResourceInner().withId("rvhmgor") - .withProperties(new Trigger().withDescription("nifmzzsdymbrnysu") - .withAnnotations(Arrays.asList("datafwgckhocxvdfffw")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Stopped"))), - new 
TriggerResourceInner().withId("isofieypefojyqd") - .withProperties(new Trigger().withDescription("ukiscvwmzhw") - .withAnnotations(Arrays.asList("dataxvxilcbtg")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Disabled"))))) - .withNextLink("u"); - model = BinaryData.fromObject(model).toObject(TriggerListResponse.class); - Assertions.assertEquals("rvhmgor", model.value().get(0).id()); - Assertions.assertEquals("nifmzzsdymbrnysu", model.value().get(0).properties().description()); - Assertions.assertEquals("u", model.nextLink()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java deleted file mode 100644 index 7d952a602f08..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerPipelineReferenceTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TriggerPipelineReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerPipelineReference model = BinaryData.fromString( - "{\"pipelineReference\":{\"referenceName\":\"adswz\",\"name\":\"uyem\"},\"parameters\":{\"wyrmouv\":\"datauowhlxl\",\"rfowtdvrfmvlih\":\"datalgmokzkl\",\"hdxlw\":\"datavjdrqcrjidhftuk\"}}") - .toObject(TriggerPipelineReference.class); - Assertions.assertEquals("adswz", model.pipelineReference().referenceName()); - Assertions.assertEquals("uyem", model.pipelineReference().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerPipelineReference model = new TriggerPipelineReference() - .withPipelineReference(new PipelineReference().withReferenceName("adswz").withName("uyem")) - .withParameters( - mapOf("wyrmouv", "datauowhlxl", "rfowtdvrfmvlih", "datalgmokzkl", "hdxlw", "datavjdrqcrjidhftuk")); - model = BinaryData.fromObject(model).toObject(TriggerPipelineReference.class); - Assertions.assertEquals("adswz", model.pipelineReference().referenceName()); - Assertions.assertEquals("uyem", model.pipelineReference().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java deleted file mode 100644 index 9192092cfa79..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerReferenceTests.java +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TriggerReference; -import com.azure.resourcemanager.datafactory.models.TriggerReferenceType; -import org.junit.jupiter.api.Assertions; - -public final class TriggerReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerReference model - = BinaryData.fromString("{\"type\":\"TriggerReference\",\"referenceName\":\"qjpudupish\"}") - .toObject(TriggerReference.class); - Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.type()); - Assertions.assertEquals("qjpudupish", model.referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerReference model - = new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("qjpudupish"); - model = BinaryData.fromObject(model).toObject(TriggerReference.class); - Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.type()); - 
Assertions.assertEquals("qjpudupish", model.referenceName()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java deleted file mode 100644 index a394fc8aa9fe..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerResourceInnerTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.TriggerResourceInner; -import com.azure.resourcemanager.datafactory.models.Trigger; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TriggerResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerResourceInner model = BinaryData.fromString( - "{\"properties\":{\"type\":\"Trigger\",\"description\":\"cp\",\"runtimeState\":\"Started\",\"annotations\":[\"dataihih\",\"datahzdsqtzbsrgnow\",\"datajhf\",\"datamvec\"],\"\":{\"ekqvgqouwif\":\"dataxmwoteyowcluqo\",\"ivqikfxcvhr\":\"datampjw\",\"c\":\"datasphuagrttikteus\"}},\"name\":\"vyklxuby\",\"type\":\"ff\",\"etag\":\"fblcq\",\"id\":\"ubgq\"}") - .toObject(TriggerResourceInner.class); - Assertions.assertEquals("ubgq", model.id()); - Assertions.assertEquals("cp", model.properties().description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerResourceInner model = new TriggerResourceInner().withId("ubgq") - 
.withProperties(new Trigger().withDescription("cp") - .withAnnotations(Arrays.asList("dataihih", "datahzdsqtzbsrgnow", "datajhf", "datamvec")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Started"))); - model = BinaryData.fromObject(model).toObject(TriggerResourceInner.class); - Assertions.assertEquals("ubgq", model.id()); - Assertions.assertEquals("cp", model.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java deleted file mode 100644 index 1ab1ed51cd09..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunTests.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.core.util.serializer.JacksonAdapter; -import com.azure.core.util.serializer.SerializerEncoding; -import com.azure.resourcemanager.datafactory.models.TriggerRun; -import java.util.HashMap; -import java.util.Map; - -public final class TriggerRunTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerRun model = BinaryData.fromString( - "{\"triggerRunId\":\"hcz\",\"triggerName\":\"rxzbujr\",\"triggerType\":\"hqvwrevkhgnlnzon\",\"triggerRunTimestamp\":\"2021-07-02T07:36:09Z\",\"status\":\"Inprogress\",\"message\":\"yw\",\"properties\":{\"zehtdhgb\":\"jtszcof\",\"reljeamur\":\"k\",\"xlpm\":\"zmlovuanash\"},\"triggeredPipelines\":{\"sdbccxjmonfdgnwn\":\"bdkelvidizo\",\"keifzzhmkdasv\":\"ypuuwwltvuqjctze\",\"cu\":\"lyhb\"},\"runDimension\":{\"lvizb\":\"xgsrboldforobw\",\"dxe\":\"hfovvacqpbtu\",\"elawumu\":\"zab\"},\"dependencyStatus\":{\"ucwyhahno\":\"datazkwrrwoyc\",\"fuurutlwexx\":\"datadrkywuhps\",\"srzpgepqtybbww\":\"datalalniex\"},\"\":{\"xkjibnxmy\":\"dataakchzyvlixqnrk\",\"ijpstte\":\"datauxswqrntvl\",\"wcyyufmhruncu\":\"dataoqq\"}}") - .toObject(TriggerRun.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerRun model = new TriggerRun().withAdditionalProperties(mapOf("triggerRunId", "hcz", "triggerName", - "rxzbujr", "runDimension", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize("{\"lvizb\":\"xgsrboldforobw\",\"dxe\":\"hfovvacqpbtu\",\"elawumu\":\"zab\"}", - Object.class, SerializerEncoding.JSON), - "dependencyStatus", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - "{\"ucwyhahno\":\"datazkwrrwoyc\",\"fuurutlwexx\":\"datadrkywuhps\",\"srzpgepqtybbww\":\"datalalniex\"}", - Object.class, SerializerEncoding.JSON), - "triggeredPipelines", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize( - 
"{\"sdbccxjmonfdgnwn\":\"bdkelvidizo\",\"keifzzhmkdasv\":\"ypuuwwltvuqjctze\",\"cu\":\"lyhb\"}", - Object.class, SerializerEncoding.JSON), - "triggerType", "hqvwrevkhgnlnzon", "triggerRunTimestamp", "2021-07-02T07:36:09Z", "message", "yw", - "properties", - JacksonAdapter.createDefaultSerializerAdapter() - .deserialize("{\"zehtdhgb\":\"jtszcof\",\"reljeamur\":\"k\",\"xlpm\":\"zmlovuanash\"}", Object.class, - SerializerEncoding.JSON), - "status", "Inprogress")); - model = BinaryData.fromObject(model).toObject(TriggerRun.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java deleted file mode 100644 index f8c216793f6b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsCancelWithResponseMockTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggerRunsCancelWithResponseMockTests { - @Test - public void testCancelWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.triggerRuns() - .cancelWithResponse("qro", "knenpybuskvj", "fgkqudxvjrndbi", "qqrkkgawna", - com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java deleted file mode 100644 index 04e0af8b7445..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerRunsRerunWithResponseMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggerRunsRerunWithResponseMockTests { - @Test - public void testRerunWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.triggerRuns().rerunWithResponse("unsvsjo", "anxs", "lt", "ghykqxr", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java deleted file mode 100644 index 178bb20b1501..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerSubscriptionOperationStatusInnerTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.TriggerSubscriptionOperationStatusInner; - -public final class TriggerSubscriptionOperationStatusInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TriggerSubscriptionOperationStatusInner model - = BinaryData.fromString("{\"triggerName\":\"n\",\"status\":\"Provisioning\"}") - .toObject(TriggerSubscriptionOperationStatusInner.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TriggerSubscriptionOperationStatusInner model = new TriggerSubscriptionOperationStatusInner(); - model = BinaryData.fromObject(model).toObject(TriggerSubscriptionOperationStatusInner.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java deleted file mode 100644 index ca8bf5b8e842..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggerTests.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Trigger; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - Trigger model = BinaryData.fromString( - "{\"type\":\"Trigger\",\"description\":\"rtalmet\",\"runtimeState\":\"Started\",\"annotations\":[\"dataslqxi\",\"datahrmooi\",\"dataqseypxiutcxa\",\"datazhyrpeto\"],\"\":{\"rqnkkzjcjbtr\":\"datajoxslhvnhla\",\"eitpkxztmo\":\"dataaehvvibrxjjstoq\"}}") - .toObject(Trigger.class); - Assertions.assertEquals("rtalmet", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - Trigger model = new Trigger().withDescription("rtalmet") - .withAnnotations(Arrays.asList("dataslqxi", "datahrmooi", "dataqseypxiutcxa", "datazhyrpeto")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Started")); - model = BinaryData.fromObject(model).toObject(Trigger.class); - Assertions.assertEquals("rtalmet", model.description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java deleted file mode 100644 index cfe2f9db7734..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersCreateOrUpdateWithResponseMockTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.Trigger; -import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersCreateOrUpdateWithResponseMockTests { - @Test - public void testCreateOrUpdateWithResponse() throws Exception { - String responseStr - = 
"{\"properties\":{\"type\":\"Trigger\",\"description\":\"ohjwzynbhltrmbh\",\"runtimeState\":\"Started\",\"annotations\":[\"datafimllradqwpu\",\"datavuphizztklkshdee\",\"datatjmdefkphs\",\"datakivyaf\"],\"\":{\"rkf\":\"datannpjulpwwmxwlwc\"}},\"name\":\"jaz\",\"type\":\"bjukikdcv\",\"etag\":\"xm\",\"id\":\"ugujiwi\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerResource response = manager.triggers() - .define("mmglbxoeghordccp") - .withExistingFactory("jpkzmhaxtw", "imlfrkmyn") - .withProperties(new Trigger().withDescription("wzc") - .withAnnotations(Arrays.asList("dataccaiphsartyiqq", "datadgyshpvva")) - .withAdditionalProperties(mapOf("type", "Trigger", "runtimeState", "Started"))) - .withIfMatch("plhfwqdvd") - .create(); - - Assertions.assertEquals("ugujiwi", response.id()); - Assertions.assertEquals("ohjwzynbhltrmbh", response.properties().description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java deleted file mode 100644 index 73c698a66554..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersDeleteWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersDeleteWithResponseMockTests { - @Test - public void testDeleteWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - 
manager.triggers() - .deleteWithResponse("cmgezapuun", "yokftdlwezplnzvr", "gjweelkviki", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java deleted file mode 100644 index 30cbd01e8560..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetEventSubscriptionStatusWithResponseMockTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersGetEventSubscriptionStatusWithResponseMockTests { - @Test - public void testGetEventSubscriptionStatusWithResponse() throws Exception { - String responseStr = "{\"triggerName\":\"lwyet\",\"status\":\"Disabled\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - 
.withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response = manager.triggers() - .getEventSubscriptionStatusWithResponse("fryket", "rzxbmiyifkgmyqnj", "jyppixfubkfj", - com.azure.core.util.Context.NONE) - .getValue(); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java deleted file mode 100644 index 4f81ff4f0991..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersGetWithResponseMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersGetWithResponseMockTests { - @Test - public void testGetWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"type\":\"Trigger\",\"description\":\"oo\",\"runtimeState\":\"Started\",\"annotations\":[\"dataqioqhphjq\",\"datakacwkoqmxkxfm\",\"databr\",\"dataslforlaudemzrpdn\"],\"\":{\"bbg\":\"datajb\"}},\"name\":\"okn\",\"type\":\"dqfynrdagmihxjpf\",\"etag\":\"puibczlre\",\"id\":\"rmqb\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerResource response = manager.triggers() - .getWithResponse("ltzyua", "nlxcdpjssdt", "snlxwqmzez", "hfjjjzcxtzk", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("rmqb", response.id()); - Assertions.assertEquals("oo", response.properties().description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java deleted file mode 100644 index c483c0eb861b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersListByFactoryMockTests.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.TriggerResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersListByFactoryMockTests { - @Test - public void testListByFactory() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"type\":\"Trigger\",\"description\":\"rsravsscblsxm\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datad\",\"datauzclfbvvuyoilni\",\"datawxwaquuvbb\",\"datagxsfeslxwlmx\"],\"\":{\"ktblom\":\"databi\"}},\"name\":\"vicdqufjahucmeb\",\"type\":\"cklthsuasnxdhlov\",\"etag\":\"rfdipsshxx\",\"id\":\"bydryysvex\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> 
Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.triggers().listByFactory("nfqwjww", "sfjqxlbclvpgbu", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("bydryysvex", response.iterator().next().id()); - Assertions.assertEquals("rsravsscblsxm", response.iterator().next().properties().description()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java deleted file mode 100644 index bf30abad7123..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStartMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersStartMockTests { - @Test - public void testStart() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.triggers().start("zfoxdtzzmcrm", "hfcaiz", "uiyuzufdmsbvyg", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java deleted file mode 100644 index 8d703e80cc85..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersStopMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersStopMockTests { - @Test - public void testStop() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.triggers().stop("fljv", "aqkoecozfauh", "xxdyahlgrz", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java deleted file mode 100644 index bfd7300e9a7c..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersSubscribeToEventsMockTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersSubscribeToEventsMockTests { - @Test - public void testSubscribeToEvents() throws Exception { - String responseStr = "{\"triggerName\":\"ujfc\",\"status\":\"Provisioning\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response - = manager.triggers().subscribeToEvents("z", "rkwgsq", "srpcxwthklj", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java deleted file mode 100644 index 17efe2683086..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TriggersUnsubscribeFromEventsMockTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.datafactory.DataFactoryManager; -import com.azure.resourcemanager.datafactory.models.TriggerSubscriptionOperationStatus; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class TriggersUnsubscribeFromEventsMockTests { - @Test - public void testUnsubscribeFromEvents() throws Exception { - String responseStr = "{\"triggerName\":\"rbwbkrsmkeiunxtb\",\"status\":\"Deprovisioning\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - DataFactoryManager manager = DataFactoryManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - TriggerSubscriptionOperationStatus response = manager.triggers() - .unsubscribeFromEvents("hi", "pacivanly", "spnjlopoangrlma", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java deleted file mode 100644 index c6f1a0ecfe5a..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerDependencyReferenceTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TriggerReference; -import com.azure.resourcemanager.datafactory.models.TriggerReferenceType; -import com.azure.resourcemanager.datafactory.models.TumblingWindowTriggerDependencyReference; -import org.junit.jupiter.api.Assertions; - -public final class TumblingWindowTriggerDependencyReferenceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TumblingWindowTriggerDependencyReference model = BinaryData.fromString( - "{\"type\":\"TumblingWindowTriggerDependencyReference\",\"offset\":\"lndywgh\",\"size\":\"tfv\",\"referenceTrigger\":{\"type\":\"TriggerReference\",\"referenceName\":\"jnrom\"}}") - .toObject(TumblingWindowTriggerDependencyReference.class); - Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("jnrom", model.referenceTrigger().referenceName()); - Assertions.assertEquals("lndywgh", model.offset()); - Assertions.assertEquals("tfv", model.size()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TumblingWindowTriggerDependencyReference model = new TumblingWindowTriggerDependencyReference() - .withReferenceTrigger( - new TriggerReference().withType(TriggerReferenceType.TRIGGER_REFERENCE).withReferenceName("jnrom")) - .withOffset("lndywgh") - .withSize("tfv"); - model = BinaryData.fromObject(model).toObject(TumblingWindowTriggerDependencyReference.class); - 
Assertions.assertEquals(TriggerReferenceType.TRIGGER_REFERENCE, model.referenceTrigger().type()); - Assertions.assertEquals("jnrom", model.referenceTrigger().referenceName()); - Assertions.assertEquals("lndywgh", model.offset()); - Assertions.assertEquals("tfv", model.size()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java deleted file mode 100644 index b0cd6276351e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTests.java +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DependencyReference; -import com.azure.resourcemanager.datafactory.models.PipelineReference; -import com.azure.resourcemanager.datafactory.models.RetryPolicy; -import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference; -import com.azure.resourcemanager.datafactory.models.TumblingWindowFrequency; -import com.azure.resourcemanager.datafactory.models.TumblingWindowTrigger; -import java.time.OffsetDateTime; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class TumblingWindowTriggerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TumblingWindowTrigger model = BinaryData.fromString( - 
"{\"type\":\"TumblingWindowTrigger\",\"pipeline\":{\"pipelineReference\":{\"referenceName\":\"spnsbbhdjee\",\"name\":\"lcykihymdgukfmk\"},\"parameters\":{\"mohrllxjyxhwv\":\"datazvxknyg\",\"wnudd\":\"datayupszch\",\"ib\":\"dataazvsmnxblc\",\"eh\":\"datamgfwdxukmeo\"}},\"typeProperties\":{\"frequency\":\"Hour\",\"interval\":908330647,\"startTime\":\"2021-10-27T03:53:44Z\",\"endTime\":\"2021-02-20T05:31:07Z\",\"delay\":\"dataaiiuqmda\",\"maxConcurrency\":253707305,\"retryPolicy\":{\"count\":\"datanvmfmry\",\"intervalInSeconds\":1812804884},\"dependsOn\":[{\"type\":\"DependencyReference\"}]},\"description\":\"rvwgpj\",\"runtimeState\":\"Disabled\",\"annotations\":[\"datautdzhkbc\",\"datauavotfmgtxz\"],\"\":{\"qpbbjcznxd\":\"datazqmlkrx\",\"kjirti\":\"dataiwaaumy\"}}") - .toObject(TumblingWindowTrigger.class); - Assertions.assertEquals("rvwgpj", model.description()); - Assertions.assertEquals("spnsbbhdjee", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("lcykihymdgukfmk", model.pipeline().pipelineReference().name()); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(908330647, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-27T03:53:44Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-20T05:31:07Z"), model.endTime()); - Assertions.assertEquals(253707305, model.maxConcurrency()); - Assertions.assertEquals(1812804884, model.retryPolicy().intervalInSeconds()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TumblingWindowTrigger model = new TumblingWindowTrigger().withDescription("rvwgpj") - .withAnnotations(Arrays.asList("datautdzhkbc", "datauavotfmgtxz")) - .withPipeline(new TriggerPipelineReference() - .withPipelineReference( - new PipelineReference().withReferenceName("spnsbbhdjee").withName("lcykihymdgukfmk")) - .withParameters(mapOf("mohrllxjyxhwv", "datazvxknyg", "wnudd", "datayupszch", 
"ib", "dataazvsmnxblc", - "eh", "datamgfwdxukmeo"))) - .withFrequency(TumblingWindowFrequency.HOUR) - .withInterval(908330647) - .withStartTime(OffsetDateTime.parse("2021-10-27T03:53:44Z")) - .withEndTime(OffsetDateTime.parse("2021-02-20T05:31:07Z")) - .withDelay("dataaiiuqmda") - .withMaxConcurrency(253707305) - .withRetryPolicy(new RetryPolicy().withCount("datanvmfmry").withIntervalInSeconds(1812804884)) - .withDependsOn(Arrays.asList(new DependencyReference())); - model = BinaryData.fromObject(model).toObject(TumblingWindowTrigger.class); - Assertions.assertEquals("rvwgpj", model.description()); - Assertions.assertEquals("spnsbbhdjee", model.pipeline().pipelineReference().referenceName()); - Assertions.assertEquals("lcykihymdgukfmk", model.pipeline().pipelineReference().name()); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(908330647, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-27T03:53:44Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-20T05:31:07Z"), model.endTime()); - Assertions.assertEquals(253707305, model.maxConcurrency()); - Assertions.assertEquals(1812804884, model.retryPolicy().intervalInSeconds()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java deleted file mode 100644 index ab6be6de3b81..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TumblingWindowTriggerTypePropertiesTests.java +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.TumblingWindowTriggerTypeProperties; -import com.azure.resourcemanager.datafactory.models.DependencyReference; -import com.azure.resourcemanager.datafactory.models.RetryPolicy; -import com.azure.resourcemanager.datafactory.models.TumblingWindowFrequency; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class TumblingWindowTriggerTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TumblingWindowTriggerTypeProperties model = BinaryData.fromString( - 
"{\"frequency\":\"Hour\",\"interval\":1864030718,\"startTime\":\"2021-07-18T12:19:55Z\",\"endTime\":\"2021-10-02T05:42:52Z\",\"delay\":\"datagonrrarznlrr\",\"maxConcurrency\":896190776,\"retryPolicy\":{\"count\":\"dataaejbmt\",\"intervalInSeconds\":519972716},\"dependsOn\":[{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"},{\"type\":\"DependencyReference\"}]}") - .toObject(TumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1864030718, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-18T12:19:55Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-02T05:42:52Z"), model.endTime()); - Assertions.assertEquals(896190776, model.maxConcurrency()); - Assertions.assertEquals(519972716, model.retryPolicy().intervalInSeconds()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TumblingWindowTriggerTypeProperties model - = new TumblingWindowTriggerTypeProperties().withFrequency(TumblingWindowFrequency.HOUR) - .withInterval(1864030718) - .withStartTime(OffsetDateTime.parse("2021-07-18T12:19:55Z")) - .withEndTime(OffsetDateTime.parse("2021-10-02T05:42:52Z")) - .withDelay("datagonrrarznlrr") - .withMaxConcurrency(896190776) - .withRetryPolicy(new RetryPolicy().withCount("dataaejbmt").withIntervalInSeconds(519972716)) - .withDependsOn(Arrays.asList(new DependencyReference(), new DependencyReference(), - new DependencyReference(), new DependencyReference())); - model = BinaryData.fromObject(model).toObject(TumblingWindowTriggerTypeProperties.class); - Assertions.assertEquals(TumblingWindowFrequency.HOUR, model.frequency()); - Assertions.assertEquals(1864030718, model.interval()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-18T12:19:55Z"), model.startTime()); - Assertions.assertEquals(OffsetDateTime.parse("2021-10-02T05:42:52Z"), 
model.endTime()); - Assertions.assertEquals(896190776, model.maxConcurrency()); - Assertions.assertEquals(519972716, model.retryPolicy().intervalInSeconds()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java deleted file mode 100644 index 76e505259305..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/TypeConversionSettingsTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.TypeConversionSettings; - -public final class TypeConversionSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - TypeConversionSettings model = BinaryData.fromString( - "{\"allowDataTruncation\":\"datarvtaul\",\"treatBooleanAsNumber\":\"dataqvtpkodijcn\",\"dateTimeFormat\":\"datao\",\"dateTimeOffsetFormat\":\"datavcyqjjxhijbfi\",\"timeSpanFormat\":\"datahoxule\",\"culture\":\"datadbirhgjmph\"}") - .toObject(TypeConversionSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - TypeConversionSettings model = new TypeConversionSettings().withAllowDataTruncation("datarvtaul") - .withTreatBooleanAsNumber("dataqvtpkodijcn") - .withDateTimeFormat("datao") - .withDateTimeOffsetFormat("datavcyqjjxhijbfi") - .withTimeSpanFormat("datahoxule") - .withCulture("datadbirhgjmph"); - model = BinaryData.fromObject(model).toObject(TypeConversionSettings.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java deleted file mode 100644 index 5b25e32d3b90..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTests.java +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.UntilActivity; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class UntilActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UntilActivity model = BinaryData.fromString( - 
"{\"type\":\"Until\",\"typeProperties\":{\"expression\":{\"value\":\"zo\"},\"timeout\":\"dataqm\",\"activities\":[{\"type\":\"Activity\",\"name\":\"hbvfallpo\",\"description\":\"vxntsfyntkf\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"h\",\"dependencyConditions\":[\"Skipped\",\"Failed\"],\"\":{\"fgpj\":\"datadsn\",\"xephwxd\":\"datakraenzuufpdwk\",\"meq\":\"datalow\"}},{\"activity\":\"q\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"pleooom\":\"datamxuoxk\",\"qucgaofo\":\"dataqdjfldzvgo\"}}],\"userProperties\":[{\"name\":\"qnaxfvsyustrb\",\"value\":\"dataexbjbknpzhfh\"},{\"name\":\"bhgw\",\"value\":\"datasl\"},{\"name\":\"qb\",\"value\":\"datatcjbxochijwps\"}],\"\":{\"mmvatrvjkxcrx\":\"dataprumhikwahbzdgwk\",\"terjer\":\"datapenkujxdniap\",\"kdykxhxrk\":\"datawgiuduwxqytppjdy\",\"y\":\"datatu\"}},{\"type\":\"Activity\",\"name\":\"trpdgmu\",\"description\":\"mcv\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"lcrjynefx\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"yfzavs\":\"datavn\"}},{\"activity\":\"jbahshyxhfe\",\"dependencyConditions\":[\"Failed\",\"Failed\",\"Skipped\"],\"\":{\"fsmpbwwphjwqmc\":\"dataysmqeeodfpl\",\"lxqmtedzxujx\":\"dataofxgwyvjef\"}},{\"activity\":\"xjrttzhn\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\",\"Failed\",\"Succeeded\"],\"\":{\"rqhhbddxko\":\"dataqcovpjvrs\",\"vepld\":\"dataorcmayqas\",\"ubqcqnch\":\"datafxmpyvlfujsbcfog\",\"knblb\":\"datazyjug\"}}],\"userProperties\":[{\"name\":\"vcpisvprumttr\",\"value\":\"datakhugxtxxwb\"}],\"\":{\"mxplrtuegq\":\"datanlmpmvegxg\"}},{\"type\":\"Activity\",\"name\":\"qulnjeybgpjyu\",\"description\":\"uowkt\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"ydqcge\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"w\":\"datazmvttttjmdtf\",\"esgurpcwpbtumtt\":\"dataaeekom\",\"rbazgou\":\"dataixewp\"}},{\"activity\":\"hvqnkwjhjut\",\"dependencyConditions\":[\"S
kipped\"],\"\":{\"ncmuvf\":\"dataldflgqsoi\",\"erqb\":\"dataklepetsxetne\",\"kzcdnipj\":\"datalmsxnx\",\"vsvgydtdto\":\"datad\"}}],\"userProperties\":[{\"name\":\"zotmiizk\",\"value\":\"datapooaskflrqwfmbk\"},{\"name\":\"shbrzvnouthbvv\",\"value\":\"databwudiyfixpwrrqiv\"},{\"name\":\"zqcmrxh\",\"value\":\"datalozg\"},{\"name\":\"fhijcetcystrs\",\"value\":\"datayttxspaafs\"}],\"\":{\"tqnxhulgtqve\":\"datayoerlr\",\"kjzbxmgsxbk\":\"datamwbmqpbfjbsoljqk\"}},{\"type\":\"Activity\",\"name\":\"kambdoq\",\"description\":\"o\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"rbjhyld\",\"dependencyConditions\":[\"Failed\",\"Succeeded\"],\"\":{\"aqwnkjyfy\":\"dataczikfx\",\"ashgryofhuv\":\"datambtiugc\"}},{\"activity\":\"okrkibnonuoc\",\"dependencyConditions\":[\"Failed\"],\"\":{\"nmzsvdrryzxh\":\"dataaxkbyovwtpmyva\",\"hpy\":\"datavy\"}},{\"activity\":\"lmfvqvyzacjxczj\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Succeeded\",\"Skipped\"],\"\":{\"hn\":\"databjkhtmmkmezlhmtr\",\"iwxwwpitwlel\":\"datascaynhzm\",\"txfzhvxqotwcfbqz\":\"dataqqlpphotbsgkliu\",\"hyxxftrfwmxwjc\":\"datazchpjh\"}}],\"userProperties\":[{\"name\":\"kmona\",\"value\":\"dataleof\"},{\"name\":\"xznopk\",\"value\":\"dataoffeutvqgnugiiyc\"},{\"name\":\"jf\",\"value\":\"datakntdynbrf\"},{\"name\":\"crabrqdbxhg\",\"value\":\"datalz\"}],\"\":{\"fziixyxntuz\":\"datavnlubkb\",\"pcmnpo\":\"dataceuz\",\"fayophpudccaqhb\":\"datasqilmvx\",\"rgvzjtvjrrk\":\"datavbutesxufrwiive\"}}]},\"name\":\"lweozccdo\",\"description\":\"jnkthehjmij\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"hhci\",\"dependencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\"],\"\":{\"ctcbmnecozvx\":\"datagwqgbv\",\"puwjvju\":\"databztwkz\",\"xdzopfkz\":\"dataxbtkuviuxtyvpve\"}},{\"activity\":\"xjnxznlx\",\"dependencyConditions\":[\"Completed\",\"Completed\",\"Completed\",\"Failed\"],\"\":{\"ktqsbmurb\":\"datatqv\",\"o\":\"databtvsxn\",\"wfjylhmmibaowc\
":\"datahlrhjik\"}},{\"activity\":\"bznwegyhzucpixfd\",\"dependencyConditions\":[\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"hyx\":\"datacbdpyorhqbpfvh\"}},{\"activity\":\"wsnhszmuvarea\",\"dependencyConditions\":[\"Skipped\",\"Completed\"],\"\":{\"npcrsfqwqm\":\"datanmnmqydpieleruoy\"}}],\"userProperties\":[{\"name\":\"j\",\"value\":\"dataonvjur\"},{\"name\":\"czdelqazb\",\"value\":\"dataixg\"}],\"\":{\"uvqacae\":\"databhwwpaec\",\"oqjmo\":\"datavn\",\"brrqxldkhgngyofe\":\"datagdb\",\"ncxkazmydsqvjkfz\":\"datajksmyeegbertf\"}}") - .toObject(UntilActivity.class); - Assertions.assertEquals("lweozccdo", model.name()); - Assertions.assertEquals("jnkthehjmij", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("hhci", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("j", model.userProperties().get(0).name()); - Assertions.assertEquals("zo", model.expression().value()); - Assertions.assertEquals("hbvfallpo", model.activities().get(0).name()); - Assertions.assertEquals("vxntsfyntkf", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("h", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("qnaxfvsyustrb", model.activities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UntilActivity model - = new UntilActivity().withName("lweozccdo") - 
.withDescription("jnkthehjmij") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("hhci") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xjnxznlx") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.COMPLETED, DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("bznwegyhzucpixfd") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("wsnhszmuvarea") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("j").withValue("dataonvjur"), - new UserProperty().withName("czdelqazb").withValue("dataixg"))) - .withExpression(new Expression().withValue("zo")) - .withTimeout("dataqm") - .withActivities( - Arrays - .asList( - new Activity().withName("hbvfallpo") - .withDescription("vxntsfyntkf") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays - .asList( - new ActivityDependency().withActivity("h") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("q") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new 
UserProperty().withName("qnaxfvsyustrb").withValue("dataexbjbknpzhfh"), - new UserProperty().withName("bhgw").withValue("datasl"), - new UserProperty().withName("qb").withValue("datatcjbxochijwps"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("trpdgmu") - .withDescription("mcv") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("lcrjynefx") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("jbahshyxhfe") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.FAILED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xjrttzhn") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("vcpisvprumttr").withValue("datakhugxtxxwb"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("qulnjeybgpjyu") - .withDescription("uowkt") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("ydqcge") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("hvqnkwjhjut") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays - .asList(new UserProperty().withName("zotmiizk").withValue("datapooaskflrqwfmbk"), - new 
UserProperty().withName("shbrzvnouthbvv").withValue("databwudiyfixpwrrqiv"), - new UserProperty().withName("zqcmrxh").withValue("datalozg"), - new UserProperty().withName("fhijcetcystrs").withValue("datayttxspaafs"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("kambdoq") - .withDescription("o") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("rbjhyld") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("okrkibnonuoc") - .withDependencyConditions(Arrays.asList(DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("lmfvqvyzacjxczj") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.SUCCEEDED, - DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("kmona").withValue("dataleof"), - new UserProperty().withName("xznopk").withValue("dataoffeutvqgnugiiyc"), - new UserProperty().withName("jf").withValue("datakntdynbrf"), - new UserProperty().withName("crabrqdbxhg").withValue("datalz"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(UntilActivity.class); - Assertions.assertEquals("lweozccdo", model.name()); - Assertions.assertEquals("jnkthehjmij", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.onInactiveMarkAs()); - Assertions.assertEquals("hhci", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - 
Assertions.assertEquals("j", model.userProperties().get(0).name()); - Assertions.assertEquals("zo", model.expression().value()); - Assertions.assertEquals("hbvfallpo", model.activities().get(0).name()); - Assertions.assertEquals("vxntsfyntkf", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("h", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SKIPPED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("qnaxfvsyustrb", model.activities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java deleted file mode 100644 index 3ab82b080758..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UntilActivityTypePropertiesTests.java +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.UntilActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.Activity; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.Expression; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class UntilActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UntilActivityTypeProperties model = BinaryData.fromString( - "{\"expression\":{\"value\":\"rd\"},\"timeout\":\"datawgcmmvvbwrilcyep\",\"activities\":[{\"type\":\"Activity\",\"name\":\"dvgdujk\",\"description\":\"cuvyeckbud\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Skipped\",\"dependsOn\":[{\"activity\":\"dfy\",\"dependencyConditions\":[\"Succeeded\",\"Skipped\"],\"\":{\"pkvujwfyvb\":\"dataoiywmehaicfkk\",\"ii\":\"datavnbbeysef\"}},{\"activity\":\"d\",\"dependencyConditions\":[\"Skipped\"],\"\":{\"vfbrgtoqk\":\"dataveuqgptzxilw\",\"kjhu\":\"datazl\",\"idk\":\"datawiitxye\",\"bwdu\":\"dataehhkcutxmqvbh\"}},{\"activity\":\"vkrskqgokhpzvph\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"furkdhopzym\":\"datax\",\"jiezk\":\"datarfwchimgeo\",\"svyo\":\"datadexldocqhl\",\"qwfuavofeouucg\":\"dataiexmfeechltxa\"}},{\"activity\":\"i\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Succeeded\"],\"\":{\"imenjhtwkn\":\"datasegdjqnochnmxbhg\",\"pz\":\"datazcwjaqyvnol\"}}],\"userProperties\":[{\"name\":\"bss\",\"value\":\"dataquiqkuxajl\"},{\"nam
e\":\"iffzpkrno\",\"value\":\"dataexfyk\"},{\"name\":\"ircwbnmai\",\"value\":\"datadjoi\"},{\"name\":\"xngm\",\"value\":\"datavjrxoidmns\"}],\"\":{\"kjlhkcogxrs\":\"datam\",\"e\":\"datayfiochfx\",\"eudhvszwgmpzbx\":\"dataybjynzo\",\"ushzfnlqnr\":\"datafmhypwglkvspbd\"}},{\"type\":\"Activity\",\"name\":\"smrvpswe\",\"description\":\"dawhzdhszku\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Succeeded\",\"dependsOn\":[{\"activity\":\"zmyvdabgctm\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Completed\"],\"\":{\"ubszgzn\":\"datanokpkg\",\"kd\":\"datadkxhhlinje\"}},{\"activity\":\"rchnrhg\",\"dependencyConditions\":[\"Failed\",\"Completed\"],\"\":{\"rqhqqkhzpwsa\":\"datavqjthluo\",\"vxwnmiumduwpq\":\"datawsentrcdz\",\"fbgkyonadtywzrnx\":\"datadduvxmrbbgli\",\"ygjbcfprioab\":\"dataktokiptxmdad\"}},{\"activity\":\"xwid\",\"dependencyConditions\":[\"Skipped\",\"Failed\",\"Failed\"],\"\":{\"moguotexlpq\":\"datalrsmxtsywrmmha\",\"mnseigoalxwuq\":\"datadgfzetda\",\"zrxhghsmlxogim\":\"dataczrskdovgkpq\",\"lxawixdcy\":\"datahxyx\"}},{\"activity\":\"dqamiy\",\"dependencyConditions\":[\"Completed\"],\"\":{\"dzoauvwjkgpzco\":\"databca\",\"aqxztywzaq\":\"datawcnnzacqludq\",\"zlzpowsefpg\":\"datafqtstmyfebb\",\"pzbsytwt\":\"dataw\"}}],\"userProperties\":[{\"name\":\"dtsvgyzmafqsn\",\"value\":\"datau\"},{\"name\":\"uubyvwe\",\"value\":\"datayyngw\"}],\"\":{\"vxcompd\":\"datahrxoekyf\"}},{\"type\":\"Activity\",\"name\":\"nsmh\",\"description\":\"z\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"jxjaaocjlwco\",\"dependencyConditions\":[\"Completed\",\"Skipped\",\"Completed\",\"Failed\"],\"\":{\"veyngzj\":\"dataymkzbliukvmzxr\"}},{\"activity\":\"jbklt\",\"dependencyConditions\":[\"Completed\",\"Succeeded\",\"Completed\"],\"\":{\"hgcqhlfq\":\"datahu\"}},{\"activity\":\"mjldeluqqnf\",\"dependencyConditions\":[\"Succeeded\"],\"\":{\"dhtdapkdahyn\":\"dataluomaltvvp\",\"iclsxuibyfylhf\":\"datatixrkjogyqrmt\"}},{\"activity\":\"qpauqylmlun\",\"de
pendencyConditions\":[\"Succeeded\",\"Completed\",\"Failed\"],\"\":{\"lx\":\"datacwuz\"}}],\"userProperties\":[{\"name\":\"nq\",\"value\":\"datadnaidacsku\"},{\"name\":\"fq\",\"value\":\"dataxzdlfswubjvs\"}],\"\":{\"ceuwfmrckatnji\":\"datarknnr\"}}]}") - .toObject(UntilActivityTypeProperties.class); - Assertions.assertEquals("rd", model.expression().value()); - Assertions.assertEquals("dvgdujk", model.activities().get(0).name()); - Assertions.assertEquals("cuvyeckbud", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("dfy", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bss", model.activities().get(0).userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UntilActivityTypeProperties model - = new UntilActivityTypeProperties().withExpression(new Expression().withValue("rd")) - .withTimeout("datawgcmmvvbwrilcyep") - .withActivities( - Arrays - .asList( - new Activity().withName("dvgdujk") - .withDescription("cuvyeckbud") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SKIPPED) - .withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("dfy") - .withDependencyConditions(Arrays - .asList(DependencyCondition.SUCCEEDED, DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("d") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("vkrskqgokhpzvph") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - 
.withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("i") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList( - new UserProperty().withName("bss").withValue("dataquiqkuxajl"), - new UserProperty().withName("iffzpkrno").withValue("dataexfyk"), - new UserProperty().withName("ircwbnmai").withValue("datadjoi"), - new UserProperty().withName("xngm").withValue("datavjrxoidmns"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("smrvpswe") - .withDescription("dawhzdhszku") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.SUCCEEDED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("zmyvdabgctm") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("rchnrhg") - .withDependencyConditions( - Arrays.asList(DependencyCondition.FAILED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("xwid") - .withDependencyConditions(Arrays.asList(DependencyCondition.SKIPPED, - DependencyCondition.FAILED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("dqamiy") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty() - .withName("dtsvgyzmafqsn") - .withValue("datau"), new UserProperty().withName("uubyvwe").withValue("datayyngw"))) - .withAdditionalProperties(mapOf("type", "Activity")), - new Activity().withName("nsmh") - .withDescription("z") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - 
.withDependsOn( - Arrays.asList( - new ActivityDependency().withActivity("jxjaaocjlwco") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SKIPPED, DependencyCondition.COMPLETED, - DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("jbklt") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, - DependencyCondition.SUCCEEDED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("mjldeluqqnf") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("qpauqylmlun") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, - DependencyCondition.COMPLETED, DependencyCondition.FAILED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties( - Arrays.asList(new UserProperty().withName("nq").withValue("datadnaidacsku"), - new UserProperty().withName("fq").withValue("dataxzdlfswubjvs"))) - .withAdditionalProperties(mapOf("type", "Activity")))); - model = BinaryData.fromObject(model).toObject(UntilActivityTypeProperties.class); - Assertions.assertEquals("rd", model.expression().value()); - Assertions.assertEquals("dvgdujk", model.activities().get(0).name()); - Assertions.assertEquals("cuvyeckbud", model.activities().get(0).description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.activities().get(0).state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.SKIPPED, model.activities().get(0).onInactiveMarkAs()); - Assertions.assertEquals("dfy", model.activities().get(0).dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, - model.activities().get(0).dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("bss", model.activities().get(0).userProperties().get(0).name()); - } - - // Use "Map.of" if available - 
@SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java deleted file mode 100644 index 834e5594e041..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeNodeRequestTests.java +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest; -import org.junit.jupiter.api.Assertions; - -public final class UpdateIntegrationRuntimeNodeRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UpdateIntegrationRuntimeNodeRequest model = BinaryData.fromString("{\"concurrentJobsLimit\":712721872}") - .toObject(UpdateIntegrationRuntimeNodeRequest.class); - Assertions.assertEquals(712721872, model.concurrentJobsLimit()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UpdateIntegrationRuntimeNodeRequest model - = new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(712721872); - model = BinaryData.fromObject(model).toObject(UpdateIntegrationRuntimeNodeRequest.class); - Assertions.assertEquals(712721872, model.concurrentJobsLimit()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java deleted file mode 100644 index beb669b3500a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UpdateIntegrationRuntimeRequestTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAutoUpdate; -import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeRequest; -import org.junit.jupiter.api.Assertions; - -public final class UpdateIntegrationRuntimeRequestTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UpdateIntegrationRuntimeRequest model - = BinaryData.fromString("{\"autoUpdate\":\"On\",\"updateDelayOffset\":\"cwscwsvlx\"}") - .toObject(UpdateIntegrationRuntimeRequest.class); - Assertions.assertEquals(IntegrationRuntimeAutoUpdate.ON, model.autoUpdate()); - Assertions.assertEquals("cwscwsvlx", model.updateDelayOffset()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UpdateIntegrationRuntimeRequest model - = new UpdateIntegrationRuntimeRequest().withAutoUpdate(IntegrationRuntimeAutoUpdate.ON) - .withUpdateDelayOffset("cwscwsvlx"); - model = BinaryData.fromObject(model).toObject(UpdateIntegrationRuntimeRequest.class); - Assertions.assertEquals(IntegrationRuntimeAutoUpdate.ON, model.autoUpdate()); - Assertions.assertEquals("cwscwsvlx", model.updateDelayOffset()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java deleted file mode 100644 index efcb128294b1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserAccessPolicyTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.UserAccessPolicy; -import org.junit.jupiter.api.Assertions; - -public final class UserAccessPolicyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UserAccessPolicy model = BinaryData.fromString( - "{\"permissions\":\"eputtmrywnuzoqf\",\"accessResourcePath\":\"yqzrnkcqvyxlw\",\"profileName\":\"lsicohoqqnwv\",\"startTime\":\"yav\",\"expireTime\":\"heun\"}") - .toObject(UserAccessPolicy.class); - Assertions.assertEquals("eputtmrywnuzoqf", model.permissions()); - Assertions.assertEquals("yqzrnkcqvyxlw", model.accessResourcePath()); - Assertions.assertEquals("lsicohoqqnwv", model.profileName()); - Assertions.assertEquals("yav", model.startTime()); - Assertions.assertEquals("heun", model.expireTime()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UserAccessPolicy model = new UserAccessPolicy().withPermissions("eputtmrywnuzoqf") - .withAccessResourcePath("yqzrnkcqvyxlw") - .withProfileName("lsicohoqqnwv") - .withStartTime("yav") - .withExpireTime("heun"); - model = BinaryData.fromObject(model).toObject(UserAccessPolicy.class); - Assertions.assertEquals("eputtmrywnuzoqf", model.permissions()); - Assertions.assertEquals("yqzrnkcqvyxlw", model.accessResourcePath()); - Assertions.assertEquals("lsicohoqqnwv", model.profileName()); - Assertions.assertEquals("yav", model.startTime()); - Assertions.assertEquals("heun", model.expireTime()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java deleted file mode 100644 index 8d78910b4903..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/UserPropertyTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import org.junit.jupiter.api.Assertions; - -public final class UserPropertyTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - UserProperty model = BinaryData.fromString("{\"name\":\"fmluiqtqzfavyvn\",\"value\":\"dataqybaryeua\"}") - .toObject(UserProperty.class); - Assertions.assertEquals("fmluiqtqzfavyvn", model.name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - UserProperty model = new UserProperty().withName("fmluiqtqzfavyvn").withValue("dataqybaryeua"); - model = BinaryData.fromObject(model).toObject(UserProperty.class); - Assertions.assertEquals("fmluiqtqzfavyvn", model.name()); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java deleted file mode 100644 index 0ab518d50f64..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTests.java +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.ValidationActivity; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ValidationActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ValidationActivity model = BinaryData.fromString( - "{\"type\":\"Validation\",\"typeProperties\":{\"timeout\":\"datazhtovs\",\"sleep\":\"dataeothewok\",\"minimumSize\":\"datavpkdkdsdsmavtndg\",\"childItems\":\"datatximnpcghcfuduq\",\"dataset\":{\"referenceName\":\"fdtpurgeryb\",\"parameters\":{\"werukuoeyyxcd\":\"datajeea\"}}},\"name\":\"lkkglahdwxyite\",\"description\":\"oe\",\"state\":\"Inactive\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"fza\",\"dependencyConditions\":[\"Succeeded\",\"Failed\",\"Succeeded\"],\"\":{\"jrhgwgsbaew\":\"dataofohuxpfx\"}},{\"activity\":\"kqvku\",\"dependencyConditions\":[\"Completed\",\"Failed\",\"Completed\"],\"\":{\"p\":\"datalfk\"}}],\"userProperties\":[{\"name\":\"zsirhp\",\"value\":\"datagqdz\"},{\"name\":\"drcj\",\"value\":\"dataywbssli\"},{\"name\":\"hcpuddbzxi\",\"value\":\"dataqqeslnaoxke\"},{\"name\":\"utrlzzztg\",\"value\":\"datafzyxamyjhp\"}],\"\":{\"lqmddtpwilyg\":\"datavsj\",\"oqtscduuywg\":\"datao\"}}") - .toObject(ValidationActivity.class); - Assertions.assertEquals("lkkglahdwxyite", model.name()); - Assertions.assertEquals("oe", model.description()); - 
Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("fza", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zsirhp", model.userProperties().get(0).name()); - Assertions.assertEquals("fdtpurgeryb", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ValidationActivity model = new ValidationActivity().withName("lkkglahdwxyite") - .withDescription("oe") - .withState(ActivityState.INACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("fza") - .withDependencyConditions(Arrays.asList(DependencyCondition.SUCCEEDED, DependencyCondition.FAILED, - DependencyCondition.SUCCEEDED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("kqvku") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.FAILED, - DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new UserProperty().withName("zsirhp").withValue("datagqdz"), - new UserProperty().withName("drcj").withValue("dataywbssli"), - new UserProperty().withName("hcpuddbzxi").withValue("dataqqeslnaoxke"), - new UserProperty().withName("utrlzzztg").withValue("datafzyxamyjhp"))) - .withTimeout("datazhtovs") - .withSleep("dataeothewok") - .withMinimumSize("datavpkdkdsdsmavtndg") - .withChildItems("datatximnpcghcfuduq") - .withDataset(new DatasetReference().withReferenceName("fdtpurgeryb") - .withParameters(mapOf("werukuoeyyxcd", "datajeea"))); - model = BinaryData.fromObject(model).toObject(ValidationActivity.class); - Assertions.assertEquals("lkkglahdwxyite", model.name()); - Assertions.assertEquals("oe", 
model.description()); - Assertions.assertEquals(ActivityState.INACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("fza", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.SUCCEEDED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("zsirhp", model.userProperties().get(0).name()); - Assertions.assertEquals("fdtpurgeryb", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java deleted file mode 100644 index e0c376ee8641..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ValidationActivityTypePropertiesTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.ValidationActivityTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ValidationActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ValidationActivityTypeProperties model = BinaryData.fromString( - "{\"timeout\":\"datavcfmtm\",\"sleep\":\"datav\",\"minimumSize\":\"datazgtlxgtuswjgts\",\"childItems\":\"datalbjy\",\"dataset\":{\"referenceName\":\"ostrcbqocerbwaiq\",\"parameters\":{\"ean\":\"datazigelphauldals\"}}}") - .toObject(ValidationActivityTypeProperties.class); - Assertions.assertEquals("ostrcbqocerbwaiq", model.dataset().referenceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ValidationActivityTypeProperties model = new ValidationActivityTypeProperties().withTimeout("datavcfmtm") - .withSleep("datav") - .withMinimumSize("datazgtlxgtuswjgts") - .withChildItems("datalbjy") - .withDataset(new DatasetReference().withReferenceName("ostrcbqocerbwaiq") - .withParameters(mapOf("ean", "datazigelphauldals"))); - model = BinaryData.fromObject(model).toObject(ValidationActivityTypeProperties.class); - Assertions.assertEquals("ostrcbqocerbwaiq", model.dataset().referenceName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java deleted file mode 100644 index 42ee762f43dd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VariableSpecificationTests.java +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.VariableSpecification; -import com.azure.resourcemanager.datafactory.models.VariableType; -import org.junit.jupiter.api.Assertions; - -public final class VariableSpecificationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - VariableSpecification model - = BinaryData.fromString("{\"type\":\"Array\",\"defaultValue\":\"dataqabqgzslesjcb\"}") - .toObject(VariableSpecification.class); - Assertions.assertEquals(VariableType.ARRAY, model.type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - VariableSpecification model - = new VariableSpecification().withType(VariableType.ARRAY).withDefaultValue("dataqabqgzslesjcb"); - model = BinaryData.fromObject(model).toObject(VariableSpecification.class); - Assertions.assertEquals(VariableType.ARRAY, model.type()); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java deleted file mode 100644 index c32f83f3b754..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaDatasetTypePropertiesTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.VerticaDatasetTypeProperties; - -public final class VerticaDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - VerticaDatasetTypeProperties model = BinaryData.fromString( - "{\"tableName\":\"datafofkbcjzzwweob\",\"table\":\"datatdqzhixccnkfso\",\"schema\":\"datayoxmyqzyqepg\"}") - .toObject(VerticaDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - VerticaDatasetTypeProperties model = new VerticaDatasetTypeProperties().withTableName("datafofkbcjzzwweob") - .withTable("datatdqzhixccnkfso") - .withSchema("datayoxmyqzyqepg"); - model = BinaryData.fromObject(model).toObject(VerticaDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java deleted file mode 100644 index d4cfbb27af8a..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.VerticaSource; - -public final class VerticaSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - VerticaSource model = BinaryData.fromString( - "{\"type\":\"VerticaSource\",\"query\":\"dataizyjch\",\"queryTimeout\":\"dataasjrseqpo\",\"additionalColumns\":\"datahgksqwzuosyyxl\",\"sourceRetryCount\":\"dataxzudfarzayrdyrow\",\"sourceRetryWait\":\"datakpdpudqiwhvxb\",\"maxConcurrentConnections\":\"datapoeuufwsmdeffrb\",\"disableMetricsCollection\":\"datajedycjisxsp\",\"\":{\"no\":\"dataydphls\",\"vjlqfzlbpe\":\"dataqb\"}}") - .toObject(VerticaSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - VerticaSource model = new VerticaSource().withSourceRetryCount("dataxzudfarzayrdyrow") - .withSourceRetryWait("datakpdpudqiwhvxb") - .withMaxConcurrentConnections("datapoeuufwsmdeffrb") - .withDisableMetricsCollection("datajedycjisxsp") - .withQueryTimeout("dataasjrseqpo") - .withAdditionalColumns("datahgksqwzuosyyxl") - .withQuery("dataizyjch"); - model = BinaryData.fromObject(model).toObject(VerticaSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java deleted file mode 100644 index cdef59c58163..000000000000 --- 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/VerticaTableDatasetTests.java +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.VerticaTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class VerticaTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - VerticaTableDataset model = BinaryData.fromString( - "{\"type\":\"VerticaTable\",\"typeProperties\":{\"tableName\":\"datao\",\"table\":\"dataguhbnhogsezreneg\",\"schema\":\"datadtyzpx\"},\"description\":\"twkejmgem\",\"structure\":\"dataupehs\",\"schema\":\"datasdfvhrypezammpka\",\"linkedServiceName\":{\"referenceName\":\"vnpeukgnmf\",\"parameters\":{\"mlcsvk\":\"dataqnitr\",\"xlawmvdyqab\":\"datafpsrowshvfxj\"}},\"parameters\":{\"nyjyuwqlzwgd\":{\"type\":\"String\",\"defaultValue\":\"datafqdmll\"}},\"annotations\":[\"datawiril\"],\"folder\":{\"name\":\"trhqdoxdeg\"},\"\":{\"idpdaqermndd\":\"dataedpkwdtobpgd\",\"dureevivk\":\"datairqqclsaqife\",\"fpxeswctlfytb\":\"dataglioklsu\",\"ddfclmowu\":\"datatytvnpbgces\"}}") - .toObject(VerticaTableDataset.class); - Assertions.assertEquals("twkejmgem", model.description()); - Assertions.assertEquals("vnpeukgnmf", model.linkedServiceName().referenceName()); - 
Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nyjyuwqlzwgd").type()); - Assertions.assertEquals("trhqdoxdeg", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - VerticaTableDataset model = new VerticaTableDataset().withDescription("twkejmgem") - .withStructure("dataupehs") - .withSchema("datasdfvhrypezammpka") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("vnpeukgnmf") - .withParameters(mapOf("mlcsvk", "dataqnitr", "xlawmvdyqab", "datafpsrowshvfxj"))) - .withParameters(mapOf("nyjyuwqlzwgd", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datafqdmll"))) - .withAnnotations(Arrays.asList("datawiril")) - .withFolder(new DatasetFolder().withName("trhqdoxdeg")) - .withTableName("datao") - .withTable("dataguhbnhogsezreneg") - .withSchemaTypePropertiesSchema("datadtyzpx"); - model = BinaryData.fromObject(model).toObject(VerticaTableDataset.class); - Assertions.assertEquals("twkejmgem", model.description()); - Assertions.assertEquals("vnpeukgnmf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("nyjyuwqlzwgd").type()); - Assertions.assertEquals("trhqdoxdeg", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java deleted file mode 100644 index b4d57b3be6bd..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ActivityDependency; -import com.azure.resourcemanager.datafactory.models.ActivityOnInactiveMarkAs; -import com.azure.resourcemanager.datafactory.models.ActivityState; -import com.azure.resourcemanager.datafactory.models.DependencyCondition; -import com.azure.resourcemanager.datafactory.models.UserProperty; -import com.azure.resourcemanager.datafactory.models.WaitActivity; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WaitActivityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WaitActivity model = BinaryData.fromString( - 
"{\"type\":\"Wait\",\"typeProperties\":{\"waitTimeInSeconds\":\"datavktlrcauad\"},\"name\":\"prjs\",\"description\":\"n\",\"state\":\"Active\",\"onInactiveMarkAs\":\"Failed\",\"dependsOn\":[{\"activity\":\"mxxpfylmpftwt\",\"dependencyConditions\":[\"Completed\"],\"\":{\"hpvohvcaq\":\"datatgwwtaolfdgjrg\",\"zz\":\"datarpp\"}},{\"activity\":\"arnjueqo\",\"dependencyConditions\":[\"Completed\",\"Completed\"],\"\":{\"xyznnpazbfrqotig\":\"datazoihtncadrmthh\",\"wqyhklhossc\":\"datandfrxn\",\"samhxkjjhflrgx\":\"datajtiungjbfmrsjgm\"}}],\"userProperties\":[{\"name\":\"h\",\"value\":\"dataadgqpbgzyafazwie\"}],\"\":{\"uinrs\":\"dataxjjdbo\",\"bcwtcqjsvlzdus\":\"datarijcwnthtq\",\"tssbkzdgwpyljn\":\"datazbvdzjlkocjuajcl\"}}") - .toObject(WaitActivity.class); - Assertions.assertEquals("prjs", model.name()); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("mxxpfylmpftwt", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("h", model.userProperties().get(0).name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WaitActivity model = new WaitActivity().withName("prjs") - .withDescription("n") - .withState(ActivityState.ACTIVE) - .withOnInactiveMarkAs(ActivityOnInactiveMarkAs.FAILED) - .withDependsOn(Arrays.asList( - new ActivityDependency().withActivity("mxxpfylmpftwt") - .withDependencyConditions(Arrays.asList(DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()), - new ActivityDependency().withActivity("arnjueqo") - .withDependencyConditions( - Arrays.asList(DependencyCondition.COMPLETED, DependencyCondition.COMPLETED)) - .withAdditionalProperties(mapOf()))) - .withUserProperties(Arrays.asList(new 
UserProperty().withName("h").withValue("dataadgqpbgzyafazwie"))) - .withWaitTimeInSeconds("datavktlrcauad"); - model = BinaryData.fromObject(model).toObject(WaitActivity.class); - Assertions.assertEquals("prjs", model.name()); - Assertions.assertEquals("n", model.description()); - Assertions.assertEquals(ActivityState.ACTIVE, model.state()); - Assertions.assertEquals(ActivityOnInactiveMarkAs.FAILED, model.onInactiveMarkAs()); - Assertions.assertEquals("mxxpfylmpftwt", model.dependsOn().get(0).activity()); - Assertions.assertEquals(DependencyCondition.COMPLETED, model.dependsOn().get(0).dependencyConditions().get(0)); - Assertions.assertEquals("h", model.userProperties().get(0).name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java deleted file mode 100644 index 19a6078a63dc..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WaitActivityTypePropertiesTests.java +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.WaitActivityTypeProperties; - -public final class WaitActivityTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WaitActivityTypeProperties model = BinaryData.fromString("{\"waitTimeInSeconds\":\"dataiprjahgqzb\"}") - .toObject(WaitActivityTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WaitActivityTypeProperties model = new WaitActivityTypeProperties().withWaitTimeInSeconds("dataiprjahgqzb"); - model = BinaryData.fromObject(model).toObject(WaitActivityTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java deleted file mode 100644 index df7164e21f23..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSinkTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DWCopyCommandDefaultValue; -import com.azure.resourcemanager.datafactory.models.DWCopyCommandSettings; -import com.azure.resourcemanager.datafactory.models.WarehouseSink; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WarehouseSinkTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WarehouseSink model = BinaryData.fromString( - "{\"type\":\"WarehouseSink\",\"preCopyScript\":\"datafcxdldhhkdeviw\",\"allowCopyCommand\":\"datahfxvl\",\"copyCommandSettings\":{\"defaultValues\":[{\"columnName\":\"datasmgh\",\"defaultValue\":\"datatuujcuavctxyrmws\"},{\"columnName\":\"datazmy\",\"defaultValue\":\"datan\"}],\"additionalOptions\":{\"ugpkunvygupgnnvm\":\"jxvotidlwmewrgul\"}},\"tableOption\":\"datazqmxwwmekms\",\"writeBehavior\":\"datafjbefszfrxfy\",\"writeBatchSize\":\"dataypxcqmdeecd\",\"writeBatchTimeout\":\"datajsizyhp\",\"sinkRetryCount\":\"dataykgrtwhmad\",\"sinkRetryWait\":\"datasmwjkqtfyuygy\",\"maxConcurrentConnections\":\"datahchxueaitzgewwqw\",\"disableMetricsCollection\":\"datatkr\",\"\":{\"pkrrppmwozwji\":\"databzrlfsewusq\",\"vzslttkpirku\":\"dataxyrtec\",\"zhsdpfoabmahuwxo\":\"datacee\",\"tgsocqkdclbzqnao\":\"dataddqzewrr\"}}") - .toObject(WarehouseSink.class); - Assertions.assertEquals("jxvotidlwmewrgul", - model.copyCommandSettings().additionalOptions().get("ugpkunvygupgnnvm")); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WarehouseSink model - = new WarehouseSink().withWriteBatchSize("dataypxcqmdeecd") - .withWriteBatchTimeout("datajsizyhp") - .withSinkRetryCount("dataykgrtwhmad") - .withSinkRetryWait("datasmwjkqtfyuygy") - .withMaxConcurrentConnections("datahchxueaitzgewwqw") - .withDisableMetricsCollection("datatkr") - 
.withPreCopyScript("datafcxdldhhkdeviw") - .withAllowCopyCommand("datahfxvl") - .withCopyCommandSettings(new DWCopyCommandSettings() - .withDefaultValues(Arrays.asList( - new DWCopyCommandDefaultValue().withColumnName("datasmgh") - .withDefaultValue("datatuujcuavctxyrmws"), - new DWCopyCommandDefaultValue().withColumnName("datazmy").withDefaultValue("datan"))) - .withAdditionalOptions(mapOf("ugpkunvygupgnnvm", "jxvotidlwmewrgul"))) - .withTableOption("datazqmxwwmekms") - .withWriteBehavior("datafjbefszfrxfy"); - model = BinaryData.fromObject(model).toObject(WarehouseSink.class); - Assertions.assertEquals("jxvotidlwmewrgul", - model.copyCommandSettings().additionalOptions().get("ugpkunvygupgnnvm")); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java deleted file mode 100644 index 1a783eb4fd4f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseSourceTests.java +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.SqlPartitionSettings; -import com.azure.resourcemanager.datafactory.models.WarehouseSource; - -public final class WarehouseSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WarehouseSource model = BinaryData.fromString( - "{\"type\":\"WarehouseSource\",\"sqlReaderQuery\":\"datatbgoccy\",\"sqlReaderStoredProcedureName\":\"datasrhb\",\"storedProcedureParameters\":\"databnuflfzaw\",\"isolationLevel\":\"datazexhbpyoqfbjpclb\",\"partitionOption\":\"dataojpj\",\"partitionSettings\":{\"partitionColumnName\":\"datawyhxzythxzrvjfsm\",\"partitionUpperBound\":\"datadhhwfrm\",\"partitionLowerBound\":\"dataokefdgfexa\"},\"queryTimeout\":\"datat\",\"additionalColumns\":\"datapszdn\",\"sourceRetryCount\":\"datao\",\"sourceRetryWait\":\"dataqxmdievkmrso\",\"maxConcurrentConnections\":\"datayiheheimuqqmd\",\"disableMetricsCollection\":\"datawxfmrm\",\"\":{\"sz\":\"dataypsypmthf\"}}") - .toObject(WarehouseSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WarehouseSource model = new WarehouseSource().withSourceRetryCount("datao") - .withSourceRetryWait("dataqxmdievkmrso") - .withMaxConcurrentConnections("datayiheheimuqqmd") - .withDisableMetricsCollection("datawxfmrm") - .withQueryTimeout("datat") - .withAdditionalColumns("datapszdn") - .withSqlReaderQuery("datatbgoccy") - .withSqlReaderStoredProcedureName("datasrhb") - .withStoredProcedureParameters("databnuflfzaw") - .withIsolationLevel("datazexhbpyoqfbjpclb") - .withPartitionOption("dataojpj") - .withPartitionSettings(new SqlPartitionSettings().withPartitionColumnName("datawyhxzythxzrvjfsm") - .withPartitionUpperBound("datadhhwfrm") - .withPartitionLowerBound("dataokefdgfexa")); - model = BinaryData.fromObject(model).toObject(WarehouseSource.class); - } -} diff --git 
a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java deleted file mode 100644 index 72b959bcec5f..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.WarehouseTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WarehouseTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WarehouseTableDataset model = BinaryData.fromString( - 
"{\"type\":\"WarehouseTable\",\"typeProperties\":{\"schema\":\"databftswcd\",\"table\":\"datanseptvdtic\"},\"description\":\"fl\",\"structure\":\"datawkopxdk\",\"schema\":\"datawoqhgppwxn\",\"linkedServiceName\":{\"referenceName\":\"kfzrxxf\",\"parameters\":{\"xajtbcqjkbkjcurx\":\"datavqzjnnuwwvjzr\",\"bdz\":\"datajwyzrieitqmlzuw\",\"f\":\"dataajxkmpe\",\"eszunb\":\"datalgeehbdjgplnlxr\"}},\"parameters\":{\"vequzytapgzdhz\":{\"type\":\"String\",\"defaultValue\":\"dataubukqmierzrnob\"},\"hvhnlsevzcrr\":{\"type\":\"SecureString\",\"defaultValue\":\"datacdsysxnk\"},\"sfqeaxdqel\":{\"type\":\"Object\",\"defaultValue\":\"datakgdwqym\"}},\"annotations\":[\"datasaqhvia\",\"datagqrwuhvv\",\"datafznfgpbc\",\"datakcpympdjieask\"],\"folder\":{\"name\":\"clnfusrgnos\"},\"\":{\"ikbvqzrurgbqaucp\":\"databmjphlyyuahvy\",\"gjlyxtugpea\":\"datakxjnohafwmf\",\"sdwxfamtxccfe\":\"datae\"}}") - .toObject(WarehouseTableDataset.class); - Assertions.assertEquals("fl", model.description()); - Assertions.assertEquals("kfzrxxf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("vequzytapgzdhz").type()); - Assertions.assertEquals("clnfusrgnos", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WarehouseTableDataset model = new WarehouseTableDataset().withDescription("fl") - .withStructure("datawkopxdk") - .withSchema("datawoqhgppwxn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("kfzrxxf") - .withParameters(mapOf("xajtbcqjkbkjcurx", "datavqzjnnuwwvjzr", "bdz", "datajwyzrieitqmlzuw", "f", - "dataajxkmpe", "eszunb", "datalgeehbdjgplnlxr"))) - .withParameters(mapOf("vequzytapgzdhz", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataubukqmierzrnob"), - "hvhnlsevzcrr", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datacdsysxnk"), - "sfqeaxdqel", - new 
ParameterSpecification().withType(ParameterType.OBJECT).withDefaultValue("datakgdwqym"))) - .withAnnotations(Arrays.asList("datasaqhvia", "datagqrwuhvv", "datafznfgpbc", "datakcpympdjieask")) - .withFolder(new DatasetFolder().withName("clnfusrgnos")) - .withSchemaTypePropertiesSchema("databftswcd") - .withTable("datanseptvdtic"); - model = BinaryData.fromObject(model).toObject(WarehouseTableDataset.class); - Assertions.assertEquals("fl", model.description()); - Assertions.assertEquals("kfzrxxf", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("vequzytapgzdhz").type()); - Assertions.assertEquals("clnfusrgnos", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java deleted file mode 100644 index c027b0611f41..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WarehouseTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.WarehouseTableDatasetTypeProperties; - -public final class WarehouseTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WarehouseTableDatasetTypeProperties model - = BinaryData.fromString("{\"schema\":\"dataavbghoucvkand\",\"table\":\"dataoimazkmqfwbg\"}") - .toObject(WarehouseTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WarehouseTableDatasetTypeProperties model - = new WarehouseTableDatasetTypeProperties().withSchema("dataavbghoucvkand").withTable("dataoimazkmqfwbg"); - model = BinaryData.fromObject(model).toObject(WarehouseTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java deleted file mode 100644 index 420c427300d7..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebAnonymousAuthenticationTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.WebAnonymousAuthentication; - -public final class WebAnonymousAuthenticationTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebAnonymousAuthentication model - = BinaryData.fromString("{\"authenticationType\":\"Anonymous\",\"url\":\"datajdqvvyjehyvn\"}") - .toObject(WebAnonymousAuthentication.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WebAnonymousAuthentication model = new WebAnonymousAuthentication().withUrl("datajdqvvyjehyvn"); - model = BinaryData.fromObject(model).toObject(WebAnonymousAuthentication.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java deleted file mode 100644 index d24280fa4bb1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.WebLinkedService; -import com.azure.resourcemanager.datafactory.models.WebLinkedServiceTypeProperties; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WebLinkedServiceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebLinkedService model = BinaryData.fromString( - "{\"type\":\"Web\",\"typeProperties\":{\"authenticationType\":\"WebLinkedServiceTypeProperties\",\"url\":\"dataucmzkqtkfjacktav\"},\"connectVia\":{\"referenceName\":\"bgodjfyplavbvs\",\"parameters\":{\"teikf\":\"datadsoqwexiebnz\",\"etqj\":\"datajqdfadgywyla\"}},\"description\":\"hyst\",\"parameters\":{\"zolrvw\":{\"type\":\"Int\",\"defaultValue\":\"databtkogfggyl\"},\"swrjyb\":{\"type\":\"Int\",\"defaultValue\":\"dataeqjteoaxirmg\"},\"lzukego\":{\"type\":\"SecureString\",\"defaultValue\":\"dataobamtarirdzdgvqo\"}},\"annotations\":[\"datapypbm\",\"dataqsxblmnxrxkul\",\"datativviyqonbxxyfoz\"],\"\":{\"edpnk\":\"datadywxjikfrxvlu\"}}") - .toObject(WebLinkedService.class); - Assertions.assertEquals("bgodjfyplavbvs", model.connectVia().referenceName()); - Assertions.assertEquals("hyst", model.description()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("zolrvw").type()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WebLinkedService model = new WebLinkedService() - .withConnectVia(new IntegrationRuntimeReference().withReferenceName("bgodjfyplavbvs") - .withParameters(mapOf("teikf", "datadsoqwexiebnz", "etqj", "datajqdfadgywyla"))) - 
.withDescription("hyst") - .withParameters(mapOf("zolrvw", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("databtkogfggyl"), "swrjyb", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataeqjteoaxirmg"), - "lzukego", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataobamtarirdzdgvqo"))) - .withAnnotations(Arrays.asList("datapypbm", "dataqsxblmnxrxkul", "datativviyqonbxxyfoz")) - .withTypeProperties(new WebLinkedServiceTypeProperties().withUrl("dataucmzkqtkfjacktav")); - model = BinaryData.fromObject(model).toObject(WebLinkedService.class); - Assertions.assertEquals("bgodjfyplavbvs", model.connectVia().referenceName()); - Assertions.assertEquals("hyst", model.description()); - Assertions.assertEquals(ParameterType.INT, model.parameters().get("zolrvw").type()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java deleted file mode 100644 index af1b0884a77a..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebLinkedServiceTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.WebLinkedServiceTypeProperties; - -public final class WebLinkedServiceTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebLinkedServiceTypeProperties model = BinaryData - .fromString("{\"authenticationType\":\"WebLinkedServiceTypeProperties\",\"url\":\"dataimqaxtmvmycvjpa\"}") - .toObject(WebLinkedServiceTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WebLinkedServiceTypeProperties model = new WebLinkedServiceTypeProperties().withUrl("dataimqaxtmvmycvjpa"); - model = BinaryData.fromObject(model).toObject(WebLinkedServiceTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java deleted file mode 100644 index e2888a73ee2e..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebSourceTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.WebSource; - -public final class WebSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebSource model = BinaryData.fromString( - "{\"type\":\"WebSource\",\"additionalColumns\":\"datac\",\"sourceRetryCount\":\"databutmxtijs\",\"sourceRetryWait\":\"datatdp\",\"maxConcurrentConnections\":\"datajtwibwcd\",\"disableMetricsCollection\":\"datamnswxq\",\"\":{\"tdlgxmgghutlhs\":\"dataffcanvr\",\"e\":\"datazljyogcpw\",\"lqdikuvjcls\":\"datapbiuwnxhqeljmf\"}}") - .toObject(WebSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WebSource model = new WebSource().withSourceRetryCount("databutmxtijs") - .withSourceRetryWait("datatdp") - .withMaxConcurrentConnections("datajtwibwcd") - .withDisableMetricsCollection("datamnswxq") - .withAdditionalColumns("datac"); - model = BinaryData.fromObject(model).toObject(WebSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java deleted file mode 100644 index 03c266158ba1..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.WebTableDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WebTableDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebTableDataset model = BinaryData.fromString( - "{\"type\":\"WebTable\",\"typeProperties\":{\"index\":\"dataawexizb\",\"path\":\"dataetji\"},\"description\":\"hpnbm\",\"structure\":\"datavvyxtvvxnakzi\",\"schema\":\"datakaybfmlngf\",\"linkedServiceName\":{\"referenceName\":\"hrmvla\",\"parameters\":{\"zblxna\":\"datajmwxn\",\"kovohwvpr\":\"datahsmfndcbsyhludzj\",\"cntjna\":\"datafdvtdurmdt\",\"twskkfkuyikmxhh\":\"datafhvqiias\"}},\"parameters\":{\"lfl\":{\"type\":\"String\",\"defaultValue\":\"datajkewriglbqtrefe\"},\"cvdefxonztpcjpt\":{\"type\":\"Int\",\"defaultValue\":\"dataffzw\"},\"h\":{\"type\":\"SecureString\",\"defaultValue\":\"dataqrcjqpzjvnpjr\"},\"viqehmdqvaol\":{\"type\":\"Float\",\"defaultValue\":\"datajbioag\"}},\"annotations\":[\"datadfs\",\"datavkjcim\",\"datarsvxphtjnhptj\"],\"folder\":{\"name\":\"dcyzhimmydtd\"},\"\":{\"prk\":\"datamizuzjdurgyzcsla\",\"gacnqp\":\"dataoyimxpggktteag\"}}") - .toObject(WebTableDataset.class); - Assertions.assertEquals("hpnbm", model.description()); - Assertions.assertEquals("hrmvla", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("lfl").type()); - Assertions.assertEquals("dcyzhimmydtd", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public 
void testSerialize() throws Exception { - WebTableDataset model - = new WebTableDataset().withDescription("hpnbm") - .withStructure("datavvyxtvvxnakzi") - .withSchema("datakaybfmlngf") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("hrmvla") - .withParameters(mapOf("zblxna", "datajmwxn", "kovohwvpr", "datahsmfndcbsyhludzj", "cntjna", - "datafdvtdurmdt", "twskkfkuyikmxhh", "datafhvqiias"))) - .withParameters( - mapOf("lfl", - new ParameterSpecification().withType(ParameterType.STRING) - .withDefaultValue("datajkewriglbqtrefe"), - "cvdefxonztpcjpt", - new ParameterSpecification().withType(ParameterType.INT).withDefaultValue("dataffzw"), "h", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataqrcjqpzjvnpjr"), - "viqehmdqvaol", - new ParameterSpecification().withType(ParameterType.FLOAT).withDefaultValue("datajbioag"))) - .withAnnotations(Arrays.asList("datadfs", "datavkjcim", "datarsvxphtjnhptj")) - .withFolder(new DatasetFolder().withName("dcyzhimmydtd")) - .withIndex("dataawexizb") - .withPath("dataetji"); - model = BinaryData.fromObject(model).toObject(WebTableDataset.class); - Assertions.assertEquals("hpnbm", model.description()); - Assertions.assertEquals("hrmvla", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.STRING, model.parameters().get("lfl").type()); - Assertions.assertEquals("dcyzhimmydtd", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java deleted file mode 100644 index 82433f68b1a8..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WebTableDatasetTypePropertiesTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.WebTableDatasetTypeProperties; - -public final class WebTableDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WebTableDatasetTypeProperties model = BinaryData.fromString("{\"index\":\"datauytv\",\"path\":\"dataeylpby\"}") - .toObject(WebTableDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WebTableDatasetTypeProperties model - = new WebTableDatasetTypeProperties().withIndex("datauytv").withPath("dataeylpby"); - model = BinaryData.fromObject(model).toObject(WebTableDatasetTypeProperties.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java deleted file mode 100644 index 9e55632bfbce..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/WranglingDataFlowTests.java +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DataFlowFolder; -import com.azure.resourcemanager.datafactory.models.DataFlowReference; -import com.azure.resourcemanager.datafactory.models.DataFlowReferenceType; -import com.azure.resourcemanager.datafactory.models.DatasetReference; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.PowerQuerySource; -import com.azure.resourcemanager.datafactory.models.WranglingDataFlow; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class WranglingDataFlowTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - WranglingDataFlow model = BinaryData.fromString( - 
"{\"type\":\"WranglingDataFlow\",\"typeProperties\":{\"sources\":[{\"script\":\"niucbdaombwiinj\",\"schemaLinkedService\":{\"referenceName\":\"l\",\"parameters\":{\"owavvqxuajgcqw\":\"datale\",\"i\":\"datalynkgfcfdruw\",\"byokvjgbzsxe\":\"dataxxtclhuulri\",\"yhcdjwsuoardnag\":\"datarsltt\"}},\"name\":\"tpufpbpgnrholhu\",\"description\":\"fwxi\",\"dataset\":{\"referenceName\":\"kysolsyjprxs\",\"parameters\":{\"hfvhuwzbxpcqz\":\"datadmcvhtbbz\",\"lrrskap\":\"dataihotjecohmxv\"}},\"linkedService\":{\"referenceName\":\"wie\",\"parameters\":{\"imyc\":\"datayaderltfokyks\",\"rsejegprkj\":\"datagrvkcxzznnuif\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"rjmwvvbtuqkxxi\",\"datasetParameters\":\"datagxql\",\"parameters\":{\"vjaqu\":\"dataotjgxieqfkyfhi\"},\"\":{\"mj\":\"dataynvskpajbmgeume\",\"apeqiscrpil\":\"dataxcbccwkqmt\"}}},{\"script\":\"trqrejda\",\"schemaLinkedService\":{\"referenceName\":\"uqimldahlfxl\",\"parameters\":{\"nfsncs\":\"datafmuad\",\"tybnxgztlcgctj\":\"dataiioshjgcz\",\"ywjzebecuvlbefvw\":\"datahfjv\",\"wpsyxjij\":\"dataljkxpylrwoxz\"}},\"name\":\"ypdvrbkerdkdkga\",\"description\":\"wjxildfkcefeyg\",\"dataset\":{\"referenceName\":\"pjois\",\"parameters\":{\"why\":\"dataaybdjnxumentqon\",\"lwivqt\":\"dataxym\",\"bfajcywhjqwmchq\":\"datawlhlsycoybajasq\"}},\"linkedService\":{\"referenceName\":\"tfxcpupuk\",\"parameters\":{\"pwdlvwti\":\"dataj\",\"pultas\":\"datasmosaonhqnam\",\"bkajlcyizy\":\"dataaekewnazea\",\"e\":\"datadcvxodkrvfsxxby\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"vgecpwgoljtzx\",\"datasetParameters\":\"dataxsdobygoogxqapjx\",\"parameters\":{\"nosdkvi\":\"datayjfucsaod\",\"trnzpducdaaktu\":\"datafasgm\"},\"\":{\"kpp\":\"datazboimyfpqdo\",\"ikff\":\"datawyytfvpctf\"}}},{\"script\":\"g\",\"schemaLinkedService\":{\"referenceName\":\"hznwhvuldbkk\",\"parameters\":{\"gawgazmxjqi\":\"datakj\",\"jjsbcmlzaahzbhur\":\"datah\",\"lirh\":\"dataolk\"}},\"name\":\"mojusuz\",\"description\":\"jzc\",\"dataset\"
:{\"referenceName\":\"axoialahfxwccokd\",\"parameters\":{\"nuhhoqeqshav\":\"datakmkcz\",\"rqolnthbb\":\"datajqkx\",\"ceyjnc\":\"datakgzukwdrnzkjth\"}},\"linkedService\":{\"referenceName\":\"lfuyfjbpfiddhlr\",\"parameters\":{\"lqkrs\":\"datacqyjmqrfuiocus\"}},\"flowlet\":{\"type\":\"DataFlowReference\",\"referenceName\":\"hxuddqm\",\"datasetParameters\":\"dataffisjmrkkhmwdmd\",\"parameters\":{\"xokwtjawhvagnqfq\":\"dataq\",\"caymjchtvsnvlaq\":\"datadlcvmyo\",\"bczothymgobllms\":\"datanzyzawatuwqko\",\"neakh\":\"datawgwima\"},\"\":{\"gvoogxkfna\":\"dataobcyanrfvqtvk\"}}}],\"script\":\"qymhcctopuow\",\"documentLocale\":\"nskby\"},\"description\":\"uhczy\",\"annotations\":[\"datahajpxecxqnwhsc\",\"datazawmvgxsmp\"],\"folder\":{\"name\":\"wirfljf\"}}") - .toObject(WranglingDataFlow.class); - Assertions.assertEquals("uhczy", model.description()); - Assertions.assertEquals("wirfljf", model.folder().name()); - Assertions.assertEquals("tpufpbpgnrholhu", model.sources().get(0).name()); - Assertions.assertEquals("fwxi", model.sources().get(0).description()); - Assertions.assertEquals("kysolsyjprxs", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("wie", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("rjmwvvbtuqkxxi", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("l", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("niucbdaombwiinj", model.sources().get(0).script()); - Assertions.assertEquals("qymhcctopuow", model.script()); - Assertions.assertEquals("nskby", model.documentLocale()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - WranglingDataFlow model = new WranglingDataFlow().withDescription("uhczy") - .withAnnotations(Arrays.asList("datahajpxecxqnwhsc", "datazawmvgxsmp")) - .withFolder(new 
DataFlowFolder().withName("wirfljf")) - .withSources(Arrays.asList( - new PowerQuerySource().withName("tpufpbpgnrholhu") - .withDescription("fwxi") - .withDataset(new DatasetReference().withReferenceName("kysolsyjprxs") - .withParameters(mapOf("hfvhuwzbxpcqz", "datadmcvhtbbz", "lrrskap", "dataihotjecohmxv"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("wie") - .withParameters(mapOf("imyc", "datayaderltfokyks", "rsejegprkj", "datagrvkcxzznnuif"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("rjmwvvbtuqkxxi") - .withDatasetParameters("datagxql") - .withParameters(mapOf("vjaqu", "dataotjgxieqfkyfhi")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("l") - .withParameters(mapOf("owavvqxuajgcqw", "datale", "i", "datalynkgfcfdruw", "byokvjgbzsxe", - "dataxxtclhuulri", "yhcdjwsuoardnag", "datarsltt"))) - .withScript("niucbdaombwiinj"), - new PowerQuerySource().withName("ypdvrbkerdkdkga") - .withDescription("wjxildfkcefeyg") - .withDataset(new DatasetReference().withReferenceName("pjois") - .withParameters(mapOf("why", "dataaybdjnxumentqon", "lwivqt", "dataxym", "bfajcywhjqwmchq", - "datawlhlsycoybajasq"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("tfxcpupuk") - .withParameters(mapOf("pwdlvwti", "dataj", "pultas", "datasmosaonhqnam", "bkajlcyizy", - "dataaekewnazea", "e", "datadcvxodkrvfsxxby"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("vgecpwgoljtzx") - .withDatasetParameters("dataxsdobygoogxqapjx") - .withParameters(mapOf("nosdkvi", "datayjfucsaod", "trnzpducdaaktu", "datafasgm")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("uqimldahlfxl") - .withParameters(mapOf("nfsncs", "datafmuad", "tybnxgztlcgctj", "dataiioshjgcz", - "ywjzebecuvlbefvw", "datahfjv", 
"wpsyxjij", "dataljkxpylrwoxz"))) - .withScript("trqrejda"), - new PowerQuerySource().withName("mojusuz") - .withDescription("jzc") - .withDataset(new DatasetReference().withReferenceName("axoialahfxwccokd") - .withParameters(mapOf("nuhhoqeqshav", "datakmkcz", "rqolnthbb", "datajqkx", "ceyjnc", - "datakgzukwdrnzkjth"))) - .withLinkedService(new LinkedServiceReference().withReferenceName("lfuyfjbpfiddhlr") - .withParameters(mapOf("lqkrs", "datacqyjmqrfuiocus"))) - .withFlowlet(new DataFlowReference().withType(DataFlowReferenceType.DATA_FLOW_REFERENCE) - .withReferenceName("hxuddqm") - .withDatasetParameters("dataffisjmrkkhmwdmd") - .withParameters(mapOf("xokwtjawhvagnqfq", "dataq", "caymjchtvsnvlaq", "datadlcvmyo", - "bczothymgobllms", "datanzyzawatuwqko", "neakh", "datawgwima")) - .withAdditionalProperties(mapOf())) - .withSchemaLinkedService(new LinkedServiceReference().withReferenceName("hznwhvuldbkk") - .withParameters(mapOf("gawgazmxjqi", "datakj", "jjsbcmlzaahzbhur", "datah", "lirh", "dataolk"))) - .withScript("g"))) - .withScript("qymhcctopuow") - .withDocumentLocale("nskby"); - model = BinaryData.fromObject(model).toObject(WranglingDataFlow.class); - Assertions.assertEquals("uhczy", model.description()); - Assertions.assertEquals("wirfljf", model.folder().name()); - Assertions.assertEquals("tpufpbpgnrholhu", model.sources().get(0).name()); - Assertions.assertEquals("fwxi", model.sources().get(0).description()); - Assertions.assertEquals("kysolsyjprxs", model.sources().get(0).dataset().referenceName()); - Assertions.assertEquals("wie", model.sources().get(0).linkedService().referenceName()); - Assertions.assertEquals(DataFlowReferenceType.DATA_FLOW_REFERENCE, model.sources().get(0).flowlet().type()); - Assertions.assertEquals("rjmwvvbtuqkxxi", model.sources().get(0).flowlet().referenceName()); - Assertions.assertEquals("l", model.sources().get(0).schemaLinkedService().referenceName()); - Assertions.assertEquals("niucbdaombwiinj", 
model.sources().get(0).script()); - Assertions.assertEquals("qymhcctopuow", model.script()); - Assertions.assertEquals("nskby", model.documentLocale()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java deleted file mode 100644 index e367f8b32343..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroObjectDatasetTests.java +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.XeroObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class XeroObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XeroObjectDataset model = BinaryData.fromString( - "{\"type\":\"XeroObject\",\"typeProperties\":{\"tableName\":\"datalrtbofzghfuifwx\"},\"description\":\"ynohocqxug\",\"structure\":\"dataugdcr\",\"schema\":\"databplvhmhur\",\"linkedServiceName\":{\"referenceName\":\"sdjlzbdmdd\",\"parameters\":{\"pvesorsbegclmexa\":\"dataeuyaorser\"}},\"parameters\":{\"unssky\":{\"type\":\"SecureString\",\"defaultValue\":\"datahzydyvtuqvir\"},\"jx\":{\"type\":\"Bool\",\"defaultValue\":\"datat\"},\"uzscyfyi\":{\"type\":\"Bool\",\"defaultValue\":\"datasvjekejchxzjdc\"},\"jeeyv\":{\"type\":\"String\",\"defaultValue\":\"datamasjnfgngxa\"}},\"annotations\":[\"datafckmo\",\"dataljaxv\"],\"folder\":{\"name\":\"xuzhvo\"},\"\":{\"z\":\"datafwflbkjc\",\"cyzcjefpubaldj\":\"dataiyg\",\"lrlkbh\":\"datagldry\"}}") - .toObject(XeroObjectDataset.class); - Assertions.assertEquals("ynohocqxug", model.description()); - Assertions.assertEquals("sdjlzbdmdd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("unssky").type()); - Assertions.assertEquals("xuzhvo", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - XeroObjectDataset model = new 
XeroObjectDataset().withDescription("ynohocqxug") - .withStructure("dataugdcr") - .withSchema("databplvhmhur") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("sdjlzbdmdd") - .withParameters(mapOf("pvesorsbegclmexa", "dataeuyaorser"))) - .withParameters(mapOf("unssky", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("datahzydyvtuqvir"), - "jx", new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datat"), "uzscyfyi", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("datasvjekejchxzjdc"), - "jeeyv", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("datamasjnfgngxa"))) - .withAnnotations(Arrays.asList("datafckmo", "dataljaxv")) - .withFolder(new DatasetFolder().withName("xuzhvo")) - .withTableName("datalrtbofzghfuifwx"); - model = BinaryData.fromObject(model).toObject(XeroObjectDataset.class); - Assertions.assertEquals("ynohocqxug", model.description()); - Assertions.assertEquals("sdjlzbdmdd", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("unssky").type()); - Assertions.assertEquals("xuzhvo", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java deleted file mode 100644 index da2563f853a5..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XeroSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.XeroSource; - -public final class XeroSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XeroSource model = BinaryData.fromString( - "{\"type\":\"XeroSource\",\"query\":\"datahnklesvzdvak\",\"queryTimeout\":\"datajiantcy\",\"additionalColumns\":\"datatrdukdmsktuvjh\",\"sourceRetryCount\":\"datatvyt\",\"sourceRetryWait\":\"datafbsgrzw\",\"maxConcurrentConnections\":\"datadudxqebtrpsplwt\",\"disableMetricsCollection\":\"datacseybvtgcoznnjqx\",\"\":{\"otlymybmgmrkxk\":\"datanku\",\"vfh\":\"datatuynugptfjpi\",\"pzhuhuj\":\"datatqqshb\"}}") - .toObject(XeroSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - XeroSource model = new XeroSource().withSourceRetryCount("datatvyt") - .withSourceRetryWait("datafbsgrzw") - .withMaxConcurrentConnections("datadudxqebtrpsplwt") - .withDisableMetricsCollection("datacseybvtgcoznnjqx") - .withQueryTimeout("datajiantcy") - 
.withAdditionalColumns("datatrdukdmsktuvjh") - .withQuery("datahnklesvzdvak"); - model = BinaryData.fromObject(model).toObject(XeroSource.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java deleted file mode 100644 index 96bfebe97785..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTests.java +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.XmlDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class XmlDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XmlDataset model = BinaryData.fromString( - 
"{\"type\":\"Xml\",\"typeProperties\":{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datauvda\",\"fileName\":\"dataxmrszbknimxlpne\",\"\":{\"za\":\"datazutylcu\",\"qaeht\":\"datamnb\",\"edcgl\":\"databjmbnvynfaooeac\",\"lpbyxroiduyq\":\"datakakddidahzllrqm\"}},\"encodingName\":\"dataffmnoiics\",\"nullValue\":\"datayhbrjjta\",\"compression\":{\"type\":\"datardsjrho\",\"level\":\"dataqwgusxxhdo\",\"\":{\"bdmvsby\":\"datawyblv\",\"kmkwjfbo\":\"datadaelqpv\",\"v\":\"dataloggdusxursu\",\"qrizfwihvaan\":\"dataxcjkcoqwczsy\"}}},\"description\":\"tnhjrfdmfd\",\"structure\":\"datab\",\"schema\":\"dataxxjfwtgdfkkauig\",\"linkedServiceName\":{\"referenceName\":\"muafmczfedyuepsv\",\"parameters\":{\"jvyweo\":\"dataida\",\"lsyasv\":\"datafkumcfjxoky\",\"krknf\":\"datankwmygjj\",\"rdtulcrcjdklotcs\":\"datarugjqyckgtx\"}},\"parameters\":{\"sl\":{\"type\":\"SecureString\",\"defaultValue\":\"dataonsvobchkxfpwhd\"},\"wayqshwyqxrid\":{\"type\":\"Bool\",\"defaultValue\":\"dataglmnnk\"},\"wqu\":{\"type\":\"Array\",\"defaultValue\":\"datasaqjmkgx\"},\"iizevjykofvez\":{\"type\":\"String\",\"defaultValue\":\"dataylztp\"}},\"annotations\":[\"datahkqtwqlepjj\"],\"folder\":{\"name\":\"as\"},\"\":{\"togffjwajnrtwz\":\"datac\",\"nhrkmjqncfvdsc\":\"dataaqkifmxawostfz\",\"wbgodtggrssg\":\"datahemvwfnqqwypvnd\",\"uymvecvzts\":\"datajfkainj\"}}") - .toObject(XmlDataset.class); - Assertions.assertEquals("tnhjrfdmfd", model.description()); - Assertions.assertEquals("muafmczfedyuepsv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("sl").type()); - Assertions.assertEquals("as", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - XmlDataset model = new XmlDataset().withDescription("tnhjrfdmfd") - .withStructure("datab") - .withSchema("dataxxjfwtgdfkkauig") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("muafmczfedyuepsv") - 
.withParameters(mapOf("jvyweo", "dataida", "lsyasv", "datafkumcfjxoky", "krknf", "datankwmygjj", - "rdtulcrcjdklotcs", "datarugjqyckgtx"))) - .withParameters(mapOf("sl", - new ParameterSpecification().withType(ParameterType.SECURE_STRING) - .withDefaultValue("dataonsvobchkxfpwhd"), - "wayqshwyqxrid", - new ParameterSpecification().withType(ParameterType.BOOL).withDefaultValue("dataglmnnk"), "wqu", - new ParameterSpecification().withType(ParameterType.ARRAY).withDefaultValue("datasaqjmkgx"), - "iizevjykofvez", - new ParameterSpecification().withType(ParameterType.STRING).withDefaultValue("dataylztp"))) - .withAnnotations(Arrays.asList("datahkqtwqlepjj")) - .withFolder(new DatasetFolder().withName("as")) - .withLocation(new DatasetLocation().withFolderPath("datauvda") - .withFileName("dataxmrszbknimxlpne") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("dataffmnoiics") - .withNullValue("datayhbrjjta") - .withCompression(new DatasetCompression().withType("datardsjrho") - .withLevel("dataqwgusxxhdo") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(XmlDataset.class); - Assertions.assertEquals("tnhjrfdmfd", model.description()); - Assertions.assertEquals("muafmczfedyuepsv", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("sl").type()); - Assertions.assertEquals("as", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java deleted file mode 100644 index bd8b5517157b..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlDatasetTypePropertiesTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.fluent.models.XmlDatasetTypeProperties; -import com.azure.resourcemanager.datafactory.models.DatasetCompression; -import com.azure.resourcemanager.datafactory.models.DatasetLocation; -import java.util.HashMap; -import java.util.Map; - -public final class XmlDatasetTypePropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XmlDatasetTypeProperties model = BinaryData.fromString( - "{\"location\":{\"type\":\"DatasetLocation\",\"folderPath\":\"datagmusaictdscnkzzo\",\"fileName\":\"datarddclzeqozrehlb\",\"\":{\"nynpameudpabcre\":\"dataxbnjrqvzyuexoz\",\"xvcvasormrexz\":\"datawzosgy\"}},\"encodingName\":\"dataubewgzygba\",\"nullValue\":\"datavecovsdqhzr\",\"compression\":{\"type\":\"databakrli\",\"level\":\"datafvppkeqsifj\",\"\":{\"bcdtajdo\":\"datangygnhrko\"}}}") - .toObject(XmlDatasetTypeProperties.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws 
Exception { - XmlDatasetTypeProperties model = new XmlDatasetTypeProperties() - .withLocation(new DatasetLocation().withFolderPath("datagmusaictdscnkzzo") - .withFileName("datarddclzeqozrehlb") - .withAdditionalProperties(mapOf("type", "DatasetLocation"))) - .withEncodingName("dataubewgzygba") - .withNullValue("datavecovsdqhzr") - .withCompression(new DatasetCompression().withType("databakrli") - .withLevel("datafvppkeqsifj") - .withAdditionalProperties(mapOf())); - model = BinaryData.fromObject(model).toObject(XmlDatasetTypeProperties.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java deleted file mode 100644 index 8c5fde829e92..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlReadSettingsTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.XmlReadSettings; -import java.util.HashMap; -import java.util.Map; - -public final class XmlReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XmlReadSettings model = BinaryData.fromString( - "{\"type\":\"XmlReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"jnpatazfyxegkly\":\"datayzrpqehrdldvxcjj\",\"xm\":\"datad\",\"w\":\"dataoqmamascqqtq\",\"mdwi\":\"datatnlyrp\"}},\"validationMode\":\"datasucfor\",\"detectDataType\":\"dataufiphnroiz\",\"namespaces\":\"dataknybfsoayatqk\",\"namespacePrefixes\":\"datauxpldzkvbebdj\",\"\":{\"ccjd\":\"dataggdpjjkox\",\"djhqqlbwid\":\"dataoyxcozzog\"}}") - .toObject(XmlReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - XmlReadSettings model = new XmlReadSettings() - .withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))) - .withValidationMode("datasucfor") - .withDetectDataType("dataufiphnroiz") - .withNamespaces("dataknybfsoayatqk") - .withNamespacePrefixes("datauxpldzkvbebdj"); - model = BinaryData.fromObject(model).toObject(XmlReadSettings.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java deleted file mode 100644 index a9a60217ae15..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/XmlSourceTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.CompressionReadSettings; -import com.azure.resourcemanager.datafactory.models.StoreReadSettings; -import com.azure.resourcemanager.datafactory.models.XmlReadSettings; -import com.azure.resourcemanager.datafactory.models.XmlSource; -import java.util.HashMap; -import java.util.Map; - -public final class XmlSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - XmlSource model = BinaryData.fromString( - 
"{\"type\":\"XmlSource\",\"storeSettings\":{\"type\":\"StoreReadSettings\",\"maxConcurrentConnections\":\"datavpktbnmhx\",\"disableMetricsCollection\":\"datazzpauts\",\"\":{\"nvouytsajjgvusn\":\"datab\"}},\"formatSettings\":{\"type\":\"XmlReadSettings\",\"compressionProperties\":{\"type\":\"CompressionReadSettings\",\"\":{\"cbtegiwcmeyaoy\":\"datakwctlsohrtgpv\"}},\"validationMode\":\"datafgvxau\",\"detectDataType\":\"datanabgrsnfzmth\",\"namespaces\":\"datacuf\",\"namespacePrefixes\":\"datazfot\",\"\":{\"ekh\":\"dataumamdorgl\",\"iwvxmysc\":\"datasfgjbeybdukbgl\"}},\"additionalColumns\":\"dataivoexkonciacdl\",\"sourceRetryCount\":\"datahs\",\"sourceRetryWait\":\"datavxkctedhaf\",\"maxConcurrentConnections\":\"dataiffaj\",\"disableMetricsCollection\":\"datawby\",\"\":{\"uyosigkinykjxqs\":\"datauyxylbbugojdzc\",\"zt\":\"datapeqgedpizjqp\",\"oqakvutedetxokqu\":\"datardiverkwmafy\",\"cdbkceh\":\"datajdwcwj\"}}") - .toObject(XmlSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - XmlSource model = new XmlSource().withSourceRetryCount("datahs") - .withSourceRetryWait("datavxkctedhaf") - .withMaxConcurrentConnections("dataiffaj") - .withDisableMetricsCollection("datawby") - .withStoreSettings(new StoreReadSettings().withMaxConcurrentConnections("datavpktbnmhx") - .withDisableMetricsCollection("datazzpauts") - .withAdditionalProperties(mapOf("type", "StoreReadSettings"))) - .withFormatSettings(new XmlReadSettings() - .withCompressionProperties( - new CompressionReadSettings().withAdditionalProperties(mapOf("type", "CompressionReadSettings"))) - .withValidationMode("datafgvxau") - .withDetectDataType("datanabgrsnfzmth") - .withNamespaces("datacuf") - .withNamespacePrefixes("datazfot")) - .withAdditionalColumns("dataivoexkonciacdl"); - model = BinaryData.fromObject(model).toObject(XmlSource.class); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java deleted file mode 100644 index d112e5ee3ade..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZipDeflateReadSettingsTests.java +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ZipDeflateReadSettings; - -public final class ZipDeflateReadSettingsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ZipDeflateReadSettings model = BinaryData.fromString( - "{\"type\":\"ZipDeflateReadSettings\",\"preserveZipFileNameAsFolder\":\"datahv\",\"\":{\"izviswixlvnwznf\":\"datawrvtflot\",\"sllbdtr\":\"dataapy\",\"fwmajllatbldhc\":\"datakpelyg\",\"slylioguwsh\":\"datahwnithxnainssvrk\"}}") - .toObject(ZipDeflateReadSettings.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ZipDeflateReadSettings model = new ZipDeflateReadSettings().withPreserveZipFileNameAsFolder("datahv"); - model = BinaryData.fromObject(model).toObject(ZipDeflateReadSettings.class); - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java 
b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java deleted file mode 100644 index 7b4bfa5d3405..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoObjectDatasetTests.java +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.DatasetFolder; -import com.azure.resourcemanager.datafactory.models.LinkedServiceReference; -import com.azure.resourcemanager.datafactory.models.ParameterSpecification; -import com.azure.resourcemanager.datafactory.models.ParameterType; -import com.azure.resourcemanager.datafactory.models.ZohoObjectDataset; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ZohoObjectDatasetTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ZohoObjectDataset model = BinaryData.fromString( - "{\"type\":\"ZohoObject\",\"typeProperties\":{\"tableName\":\"datamxcaujbfomfbozpj\"},\"description\":\"e\",\"structure\":\"datapqcwdnnjjthpsn\",\"schema\":\"databycympohxubnnpn\",\"linkedServiceName\":{\"referenceName\":\"ebcxn\",\"parameters\":{\"jvgspj\":\"datapdyzssjlmykdy\",\"oimmsszz\":\"datafzhjngwqxcrbcrg\"}},\"parameters\":{\"nwcnvpnyldjdkj\":{\"type\":\"SecureString\",\"defaultValue\":\"dataognhtvagw\"}},\"annotations\":[\"datayknkxioxhnrjlq\"],\"folder\":{\"name\":\"ejexfdlhuhd\"},\"\":{\"cflvxbocaywmfvuh\":\"datagywadrklpdyehjr\",\"gsfmhwdxqu\":\"datamolhveol\",\"ynhitrnwqgq\":\"dataymlhklmnjqzm\",\"piqnrjoc\":\"databthb\"}}") - .toObject(ZohoObjectDataset.class); - 
Assertions.assertEquals("e", model.description()); - Assertions.assertEquals("ebcxn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("nwcnvpnyldjdkj").type()); - Assertions.assertEquals("ejexfdlhuhd", model.folder().name()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ZohoObjectDataset model = new ZohoObjectDataset().withDescription("e") - .withStructure("datapqcwdnnjjthpsn") - .withSchema("databycympohxubnnpn") - .withLinkedServiceName(new LinkedServiceReference().withReferenceName("ebcxn") - .withParameters(mapOf("jvgspj", "datapdyzssjlmykdy", "oimmsszz", "datafzhjngwqxcrbcrg"))) - .withParameters(mapOf("nwcnvpnyldjdkj", - new ParameterSpecification().withType(ParameterType.SECURE_STRING).withDefaultValue("dataognhtvagw"))) - .withAnnotations(Arrays.asList("datayknkxioxhnrjlq")) - .withFolder(new DatasetFolder().withName("ejexfdlhuhd")) - .withTableName("datamxcaujbfomfbozpj"); - model = BinaryData.fromObject(model).toObject(ZohoObjectDataset.class); - Assertions.assertEquals("e", model.description()); - Assertions.assertEquals("ebcxn", model.linkedServiceName().referenceName()); - Assertions.assertEquals(ParameterType.SECURE_STRING, model.parameters().get("nwcnvpnyldjdkj").type()); - Assertions.assertEquals("ejexfdlhuhd", model.folder().name()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java deleted file mode 100644 index 8f73a4df25c2..000000000000 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/test/java/com/azure/resourcemanager/datafactory/generated/ZohoSourceTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.datafactory.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.datafactory.models.ZohoSource; - -public final class ZohoSourceTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ZohoSource model = BinaryData.fromString( - "{\"type\":\"ZohoSource\",\"query\":\"datajsez\",\"queryTimeout\":\"datahipteo\",\"additionalColumns\":\"datadnhwdfxgec\",\"sourceRetryCount\":\"datakkdbzbhsnimompxd\",\"sourceRetryWait\":\"datap\",\"maxConcurrentConnections\":\"databdmoawh\",\"disableMetricsCollection\":\"dataxxnmyxzh\",\"\":{\"dq\":\"dataqo\",\"wywayjinlsk\":\"datazhfnylgbwdsa\"}}") - .toObject(ZohoSource.class); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ZohoSource model = new ZohoSource().withSourceRetryCount("datakkdbzbhsnimompxd") - .withSourceRetryWait("datap") - .withMaxConcurrentConnections("databdmoawh") - .withDisableMetricsCollection("dataxxnmyxzh") - .withQueryTimeout("datahipteo") - .withAdditionalColumns("datadnhwdfxgec") - .withQuery("datajsez"); - model = 
BinaryData.fromObject(model).toObject(ZohoSource.class); - } -}