Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@ Generating CSharp code
Executing AutoRest command
cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk
Autorest CSharp Version: 2.3.82
2022-09-30 02:08:47 UTC
2022-10-25 04:48:50 UTC
Azure-rest-api-specs repository information
GitHub fork: Azure
Branch: main
Commit: 4994cbed850f3726721ec6fd3235a474e8d08fcc
Commit: fdd4e5c9b9225698c7f26c75c4b26be5c57e60f8
AutoRest information
Requested version: v2
Bootstrapper version: [email protected]
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
# Changelog for the Azure Data Factory V2 .NET SDK

## Version 8.0.0
### Feature Additions
### Breaking Changes
- Added properties to the Spark job activity (SynapseSparkJobDefinitionActivity)
- Added disablePublish property to FactoryRepoConfiguration
- Added scriptBlockExecutionTimeout property to ScriptActivity

## Version 7.0.0
### Feature Additions
### Breaking Changes
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
namespace Microsoft.Azure.Management.DataFactory.Models
{
    /// <summary>
    /// Hand-written customization of the generated
    /// <see cref="FactoryVSTSConfiguration"/> partial class, adding a
    /// convenience constructor that takes all VSTS repo settings.
    /// </summary>
    public partial class FactoryVSTSConfiguration : FactoryRepoConfiguration
    {
        /// <summary>
        /// Creates a <see cref="FactoryVSTSConfiguration"/> with the given
        /// repository settings and forwards the common ones to the base
        /// <see cref="FactoryRepoConfiguration"/> constructor.
        /// </summary>
        /// <param name="accountName">Account name.</param>
        /// <param name="repositoryName">Repository name.</param>
        /// <param name="collaborationBranch">Collaboration branch.</param>
        /// <param name="rootFolder">Root folder.</param>
        /// <param name="projectName">VSTS project name.</param>
        /// <param name="lastCommitId">Last commit id.</param>
        /// <param name="tenantId">VSTS tenant id.</param>
        public FactoryVSTSConfiguration(string accountName, string repositoryName, string collaborationBranch, string rootFolder, string projectName, string lastCommitId = default(string), string tenantId = default(string))
            : base(accountName, repositoryName, collaborationBranch, rootFolder, lastCommitId)
        {
            // Only the VSTS-specific values are kept here; the rest live on the base.
            this.TenantId = tenantId;
            this.ProjectName = projectName;
            // CustomInit is the generated partial-method hook for extra initialization.
            CustomInit();
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
using System.Collections.Generic;

namespace Microsoft.Azure.Management.DataFactory.Models
{
    /// <summary>
    /// Hand-written customization of the generated
    /// <see cref="ScriptActivity"/> partial class, adding a convenience
    /// constructor for the activity's script blocks and log settings.
    /// </summary>
    public partial class ScriptActivity : ExecutionActivity
    {
        /// <summary>
        /// Creates a <see cref="ScriptActivity"/>, forwarding the common
        /// activity settings to the <see cref="ExecutionActivity"/> base.
        /// </summary>
        /// <param name="name">Activity name.</param>
        /// <param name="additionalProperties">Unmatched properties from the
        /// message are deserialized into this collection.</param>
        /// <param name="description">Activity description.</param>
        /// <param name="dependsOn">Activity depends on condition.</param>
        /// <param name="userProperties">Activity user properties.</param>
        /// <param name="linkedServiceName">Linked service reference.</param>
        /// <param name="policy">Activity policy.</param>
        /// <param name="scripts">Array of script blocks. Type: array.</param>
        /// <param name="logSettings">Log settings of script activity.</param>
        public ScriptActivity(string name, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), IList<ActivityDependency> dependsOn = default(IList<ActivityDependency>), IList<UserProperty> userProperties = default(IList<UserProperty>), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), IList<ScriptActivityScriptBlock> scripts = default(IList<ScriptActivityScriptBlock>), ScriptActivityTypePropertiesLogSettings logSettings = default(ScriptActivityTypePropertiesLogSettings))
            : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
        {
            // Script-activity-specific state; everything else is held by the base.
            this.LogSettings = logSettings;
            this.Scripts = scripts;
            // CustomInit is the generated partial-method hook for extra initialization.
            CustomInit();
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
using System.Collections.Generic;

namespace Microsoft.Azure.Management.DataFactory.Models
{
    /// <summary>
    /// Hand-written customization of the generated
    /// <see cref="SynapseSparkJobDefinitionActivity"/> partial class, adding
    /// a convenience constructor that accepts the Spark-job override values.
    /// </summary>
    public partial class SynapseSparkJobDefinitionActivity : ExecutionActivity
    {
        /// <summary>
        /// Creates a <see cref="SynapseSparkJobDefinitionActivity"/>,
        /// forwarding the common activity settings to the
        /// <see cref="ExecutionActivity"/> base. Every optional parameter
        /// overrides the corresponding value of the referenced Spark job
        /// definition when supplied.
        /// </summary>
        /// <param name="name">Activity name.</param>
        /// <param name="sparkJob">Synapse spark job reference.</param>
        /// <param name="additionalProperties">Unmatched properties from the
        /// message are deserialized into this collection.</param>
        /// <param name="description">Activity description.</param>
        /// <param name="dependsOn">Activity depends on condition.</param>
        /// <param name="userProperties">Activity user properties.</param>
        /// <param name="linkedServiceName">Linked service reference.</param>
        /// <param name="policy">Activity policy.</param>
        /// <param name="arguments">User specified arguments to the
        /// activity.</param>
        /// <param name="file">Main file for the job; overrides 'file' of the
        /// spark job definition. Type: string (or Expression with resultType
        /// string).</param>
        /// <param name="className">Fully-qualified identifier or main class
        /// in the main definition file; overrides 'className' of the spark
        /// job definition. Type: string (or Expression with resultType
        /// string).</param>
        /// <param name="files">Additional reference files for the main
        /// definition file; overrides 'files' of the spark job
        /// definition.</param>
        /// <param name="targetBigDataPool">Name of the big data pool used to
        /// execute the spark batch job; overrides 'targetBigDataPool' of the
        /// spark job definition.</param>
        /// <param name="executorSize">Cores and memory for executors
        /// allocated in the specified Spark pool; overrides 'executorCores'
        /// and 'executorMemory' of the spark job definition. Type: string
        /// (or Expression with resultType string).</param>
        /// <param name="conf">Spark configuration properties; overrides
        /// 'conf' of the spark job definition.</param>
        /// <param name="driverSize">Cores and memory for the driver
        /// allocated in the specified Spark pool; overrides 'driverCores'
        /// and 'driverMemory' of the spark job definition. Type: string (or
        /// Expression with resultType string).</param>
        /// <param name="numExecutors">Number of executors to launch for this
        /// job; overrides 'numExecutors' of the spark job
        /// definition.</param>
        public SynapseSparkJobDefinitionActivity(string name, SynapseSparkJobReference sparkJob, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), IList<ActivityDependency> dependsOn = default(IList<ActivityDependency>), IList<UserProperty> userProperties = default(IList<UserProperty>), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), IList<object> arguments = default(IList<object>), object file = default(object), object className = default(object), IList<object> files = default(IList<object>), BigDataPoolParametrizationReference targetBigDataPool = default(BigDataPoolParametrizationReference), object executorSize = default(object), object conf = default(object), object driverSize = default(object), int? numExecutors = default(int?))
            : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
        {
            // Required reference to the Spark job definition being executed.
            this.SparkJob = sparkJob;
            // Optional overrides of the referenced job definition, grouped by topic:
            // job content...
            this.File = file;
            this.ClassName = className;
            this.Files = files;
            this.Arguments = arguments;
            // ...and execution resources.
            this.TargetBigDataPool = targetBigDataPool;
            this.ExecutorSize = executorSize;
            this.DriverSize = driverSize;
            this.NumExecutors = numExecutors;
            this.Conf = conf;
            // CustomInit is the generated partial-method hook for extra initialization.
            CustomInit();
        }
    }
}

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading