diff --git a/sdk/storage/storage-blob/CHANGELOG.md b/sdk/storage/storage-blob/CHANGELOG.md
index 045607992edd..e6b1e1618755 100644
--- a/sdk/storage/storage-blob/CHANGELOG.md
+++ b/sdk/storage/storage-blob/CHANGELOG.md
@@ -1,5 +1,17 @@
# Release History
+## 12.20.0 (Unreleased)
+
+### Features Added
+
+### Breaking Changes
+
+### Bugs Fixed
+
+### Other Changes
+
+- Migrated dependency on `@azure/core-http` to `@azure/core-rest-pipeline`.
+
## 12.13.0 (Unreleased)
### Features Added
diff --git a/sdk/storage/storage-blob/package.json b/sdk/storage/storage-blob/package.json
index 07d3e8365291..54afbeaad6b2 100644
--- a/sdk/storage/storage-blob/package.json
+++ b/sdk/storage/storage-blob/package.json
@@ -1,7 +1,7 @@
{
"name": "@azure/storage-blob",
"sdk-type": "client",
- "version": "12.13.0",
+ "version": "12.20.0",
"description": "Microsoft Azure Storage SDK for JavaScript - Blob",
"main": "./dist/index.js",
"module": "./dist-esm/storage-blob/src/index.js",
diff --git a/sdk/storage/storage-file-datalake/CHANGELOG.md b/sdk/storage/storage-file-datalake/CHANGELOG.md
index 7fb2b68c2472..ae7c2cf54efa 100644
--- a/sdk/storage/storage-file-datalake/CHANGELOG.md
+++ b/sdk/storage/storage-file-datalake/CHANGELOG.md
@@ -1,5 +1,17 @@
# Release History
+## 12.20.0 (Unreleased)
+
+### Features Added
+
+### Breaking Changes
+
+### Bugs Fixed
+
+### Other Changes
+
+- Migrated dependency on `@azure/core-http` to `@azure/core-rest-pipeline`.
+
## 12.12.0 (Unreleased)
### Features Added
diff --git a/sdk/storage/storage-file-datalake/package.json b/sdk/storage/storage-file-datalake/package.json
index 1ffa254dd8b6..a41336a598cf 100644
--- a/sdk/storage/storage-file-datalake/package.json
+++ b/sdk/storage/storage-file-datalake/package.json
@@ -1,6 +1,6 @@
{
"name": "@azure/storage-file-datalake",
- "version": "12.12.0",
+ "version": "12.20.0",
"description": "Microsoft Azure Storage SDK for JavaScript - DataLake",
"sdk-type": "client",
"main": "./dist/index.js",
@@ -118,11 +118,15 @@
},
"dependencies": {
"@azure/abort-controller": "^1.0.0",
- "@azure/core-http": "^3.0.0",
+ "@azure/core-auth": "^1.4.0",
+ "@azure/core-client": "^1.6.2",
+ "@azure/core-http-compat": "^2.0.0",
"@azure/core-paging": "^1.1.1",
+ "@azure/core-rest-pipeline": "^1.10.1",
"@azure/core-tracing": "1.0.0-preview.13",
+ "@azure/core-util": "^1.1.1",
"@azure/logger": "^1.0.0",
- "@azure/storage-blob": "^12.12.0",
+ "@azure/storage-blob": "^12.20.0",
"events": "^3.0.0",
"tslib": "^2.2.0"
},
diff --git a/sdk/storage/storage-file-datalake/review/storage-file-datalake.api.md b/sdk/storage/storage-file-datalake/review/storage-file-datalake.api.md
index 4ca7191c2e08..cc8c4f5d3f22 100644
--- a/sdk/storage/storage-file-datalake/review/storage-file-datalake.api.md
+++ b/sdk/storage/storage-file-datalake/review/storage-file-datalake.api.md
@@ -7,46 +7,48 @@
///
import { AbortSignalLike } from '@azure/abort-controller';
+import { AnonymousCredential } from '@azure/storage-blob';
+import { AnonymousCredentialPolicy } from '@azure/storage-blob';
import { AzureLogger } from '@azure/logger';
-import { BaseRequestPolicy } from '@azure/core-http';
import { BlobLeaseClient } from '@azure/storage-blob';
import { BlobQueryArrowConfiguration } from '@azure/storage-blob';
+import { CommonOptions } from '@azure/storage-blob';
import { ContainerRenameResponse } from '@azure/storage-blob';
import { ContainerUndeleteResponse } from '@azure/storage-blob';
-import * as coreHttp from '@azure/core-http';
+import * as coreClient from '@azure/core-client';
+import * as coreHttpCompat from '@azure/core-http-compat';
+import * as coreRestPipeline from '@azure/core-rest-pipeline';
+import { Credential as Credential_2 } from '@azure/storage-blob';
+import { CredentialPolicy } from '@azure/storage-blob';
import { ServiceGetPropertiesResponse as DataLakeServiceGetPropertiesResponse } from '@azure/storage-blob';
import { BlobServiceProperties as DataLakeServiceProperties } from '@azure/storage-blob';
-import { deserializationPolicy } from '@azure/core-http';
-import { HttpHeaders } from '@azure/core-http';
-import { HttpOperationResponse } from '@azure/core-http';
-import { HttpRequestBody } from '@azure/core-http';
-import { HttpResponse } from '@azure/core-http';
-import { HttpClient as IHttpClient } from '@azure/core-http';
-import { KeepAliveOptions } from '@azure/core-http';
import { Lease } from '@azure/storage-blob';
import { LeaseAccessConditions } from '@azure/storage-blob';
import { LeaseOperationOptions } from '@azure/storage-blob';
import { LeaseOperationResponse } from '@azure/storage-blob';
-import { ModifiedAccessConditions as ModifiedAccessConditions_2 } from '@azure/storage-blob';
-import { OperationTracingOptions } from '@azure/core-tracing';
+import { ModifiedAccessConditions as ModifiedAccessConditions_3 } from '@azure/storage-blob';
+import { newPipeline } from '@azure/storage-blob';
import { PagedAsyncIterableIterator } from '@azure/core-paging';
-import { ProxyOptions } from '@azure/core-http';
+import { Pipeline } from '@azure/storage-blob';
import { Readable } from 'stream';
-import { RequestPolicy } from '@azure/core-http';
-import { RequestPolicyFactory } from '@azure/core-http';
-import { RequestPolicyOptions } from '@azure/core-http';
-import { RestError } from '@azure/core-http';
-import { ServiceClientOptions } from '@azure/core-http';
+import { RequestBodyType } from '@azure/core-rest-pipeline';
+import { RestError } from '@azure/core-rest-pipeline';
import { ServiceGetPropertiesOptions } from '@azure/storage-blob';
import { ServiceListContainersSegmentResponse } from '@azure/storage-blob';
import { ServiceRenameContainerOptions } from '@azure/storage-blob';
import { ServiceSetPropertiesOptions } from '@azure/storage-blob';
import { ServiceSetPropertiesResponse } from '@azure/storage-blob';
-import { TokenCredential } from '@azure/core-http';
-import { TransferProgressEvent } from '@azure/core-http';
-import { UserAgentOptions } from '@azure/core-http';
+import { StorageBrowserPolicy } from '@azure/storage-blob';
+import { StorageBrowserPolicyFactory } from '@azure/storage-blob';
+import { StoragePipelineOptions } from '@azure/storage-blob';
+import { StorageRetryPolicy } from '@azure/storage-blob';
+import { StorageRetryPolicyFactory } from '@azure/storage-blob';
+import { StorageSharedKeyCredential } from '@azure/storage-blob';
+import { StorageSharedKeyCredentialPolicy } from '@azure/storage-blob';
+import { TokenCredential } from '@azure/core-auth';
+import { TransferProgressEvent } from '@azure/core-rest-pipeline';
import { UserDelegationKeyModel } from '@azure/storage-blob';
-import { WebResource } from '@azure/core-http';
+import { WithResponse } from '@azure/storage-blob';
// @public
export interface AccessControlChangeCounters {
@@ -129,17 +131,9 @@ export interface AccountSASSignatureValues {
version?: string;
}
-// @public
-export class AnonymousCredential extends Credential_2 {
- create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy;
-}
-
-// @public
-export class AnonymousCredentialPolicy extends CredentialPolicy {
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions);
-}
+export { AnonymousCredential }
-export { BaseRequestPolicy }
+export { AnonymousCredentialPolicy }
// @public (undocumented)
export interface BlobHierarchyListSegment {
@@ -248,10 +242,7 @@ export interface CommonGenerateSasUrlOptions {
version?: string;
}
-// @public
-export interface CommonOptions {
- tracingOptions?: OperationTracingOptions;
-}
+export { CommonOptions }
// @public (undocumented)
export type CopyStatusType = "pending" | "success" | "aborted" | "failed";
@@ -263,20 +254,9 @@ export interface CpkInfo {
encryptionKeySha256?: string;
}
-// @public
-abstract class Credential_2 implements RequestPolicyFactory {
- create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy;
-}
export { Credential_2 as Credential }
-// @public
-export abstract class CredentialPolicy extends BaseRequestPolicy {
- sendRequest(request: WebResource): Promise;
- protected signRequest(request: WebResource): WebResource;
-}
-
-// @public
-export type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy;
+export { CredentialPolicy }
// @public
export class DataLakeAclChangeFailedError extends Error {
@@ -300,12 +280,12 @@ export class DataLakeDirectoryClient extends DataLakePathClient {
export class DataLakeFileClient extends DataLakePathClient {
constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
constructor(url: string, pipeline: Pipeline);
- append(body: HttpRequestBody, offset: number, length: number, options?: FileAppendOptions): Promise;
+ append(body: RequestBodyType, offset: number, length: number, options?: FileAppendOptions): Promise<FileAppendResponse>;
create(resourceType: PathResourceTypeModel, options?: PathCreateOptions): Promise;
create(options?: FileCreateOptions): Promise;
createIfNotExists(resourceType: PathResourceTypeModel, options?: PathCreateIfNotExistsOptions): Promise;
createIfNotExists(options?: FileCreateIfNotExistsOptions): Promise;
- flush(position: number, options?: FileFlushOptions): Promise;
+ flush(position: number, options?: FileFlushOptions): Promise<FileFlushResponse>;
generateSasUrl(options: FileGenerateSasUrlOptions): Promise;
query(query: string, options?: FileQueryOptions): Promise;
read(offset?: number, count?: number, options?: FileReadOptions): Promise;
@@ -313,9 +293,9 @@ export class DataLakeFileClient extends DataLakePathClient {
readToBuffer(offset?: number, count?: number, options?: FileReadToBufferOptions): Promise;
readToFile(filePath: string, offset?: number, count?: number, options?: FileReadOptions): Promise;
setExpiry(mode: FileExpiryMode, options?: FileSetExpiryOptions): Promise;
- upload(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: FileParallelUploadOptions): Promise;
- uploadFile(filePath: string, options?: FileParallelUploadOptions): Promise;
- uploadStream(stream: Readable, options?: FileParallelUploadOptions): Promise;
+ upload(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
+ uploadFile(filePath: string, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
+ uploadStream(stream: Readable, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
}
// Warning: (ae-forgotten-export) The symbol "StorageClient" needs to be exported by the entry point index.d.ts
@@ -383,7 +363,7 @@ export class DataLakePathClient extends StorageClient {
setAccessControlRecursive(acl: PathAccessControlItem[], options?: PathChangeAccessControlRecursiveOptions): Promise;
setHttpHeaders(httpHeaders: PathHttpHeaders, options?: PathSetHttpHeadersOptions): Promise;
setMetadata(metadata?: Metadata, options?: PathSetMetadataOptions): Promise;
- setPermissions(permissions: PathPermissions, options?: PathSetPermissionsOptions): Promise;
+ setPermissions(permissions: PathPermissions, options?: PathSetPermissionsOptions): Promise<PathSetPermissionsResponse>;
toDirectoryClient(): DataLakeDirectoryClient;
toFileClient(): DataLakeFileClient;
updateAccessControlRecursive(acl: PathAccessControlItem[], options?: PathChangeAccessControlRecursiveOptions): Promise;
@@ -472,8 +452,6 @@ export interface DeletedPathList {
pathItems?: DeletedPath[];
}
-export { deserializationPolicy }
-
// @public (undocumented)
export interface DirectoryCreateIfNotExistsOptions extends PathCreateIfNotExistsOptions {
}
@@ -532,12 +510,8 @@ export interface FileAppendOptions extends CommonOptions {
transactionalContentMD5?: Uint8Array;
}
-// @public
-export type FileAppendResponse = PathAppendDataHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: PathAppendDataHeaders;
- };
-};
+// @public (undocumented)
+export type FileAppendResponse = WithResponse<PathAppendDataHeaders, PathAppendDataHeaders>;
// @public (undocumented)
export interface FileCreateIfNotExistsOptions extends PathCreateIfNotExistsOptions {
@@ -576,6 +550,9 @@ export interface FileFlushOptions extends CommonOptions {
retainUncommittedData?: boolean;
}
+// @public (undocumented)
+export type FileFlushResponse = WithResponse<PathFlushDataHeaders, PathFlushDataHeaders>;
+
// @public
export interface FileGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {
permissions?: DataLakeSASPermissions;
@@ -720,13 +697,10 @@ export interface FileReadOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileReadResponse = FileReadHeaders & {
+export type FileReadResponse = WithResponse<FileReadHeaders & {
readableStreamBody?: NodeJS.ReadableStream;
- _response: HttpResponse & {
- parsedHeaders: FileReadHeaders;
- };
-};
+}, FileReadHeaders>;
// @public
export interface FileReadToBufferOptions extends CommonOptions {
@@ -757,12 +731,8 @@ export interface FileSetExpiryOptions extends CommonOptions {
timeToExpireInMs?: number;
}
-// @public
-export type FileSetExpiryResponse = FileSetExpiryHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: FileSetExpiryHeaders;
- };
-};
+// @public (undocumented)
+export type FileSetExpiryResponse = WithResponse<FileSetExpiryHeaders, FileSetExpiryHeaders>;
// @public (undocumented)
export interface FileSystemCreateHeaders {
@@ -797,11 +767,7 @@ export interface FileSystemCreateOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemCreateResponse = FileSystemCreateHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemCreateHeaders;
- };
-};
+export type FileSystemCreateResponse = WithResponse<FileSystemCreateHeaders, FileSystemCreateHeaders>;
// @public (undocumented)
export interface FileSystemDeleteHeaders {
@@ -829,11 +795,7 @@ export interface FileSystemDeleteOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemDeleteResponse = FileSystemDeleteHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemDeleteHeaders;
- };
-};
+export type FileSystemDeleteResponse = WithResponse<FileSystemDeleteHeaders, FileSystemDeleteHeaders>;
// @public
export interface FileSystemEncryptionScope {
@@ -878,15 +840,9 @@ export interface FileSystemGetAccessPolicyOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemGetAccessPolicyResponse = {
+export type FileSystemGetAccessPolicyResponse = WithResponse<{
signedIdentifiers: SignedIdentifier[];
-} & FileSystemGetAccessPolicyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemGetAccessPolicyHeaders;
- bodyAsText: string;
- parsedBody: SignedIdentifier[];
- };
-};
+} & FileSystemGetAccessPolicyHeaders, FileSystemGetAccessPolicyHeaders, SignedIdentifier[]>;
// @public (undocumented)
export interface FileSystemGetPropertiesHeaders {
@@ -928,11 +884,7 @@ export interface FileSystemGetPropertiesOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemGetPropertiesResponse = FileSystemGetPropertiesHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemGetPropertiesHeaders;
- };
-};
+export type FileSystemGetPropertiesResponse = WithResponse<FileSystemGetPropertiesHeaders, FileSystemGetPropertiesHeaders>;
// @public (undocumented)
export interface FileSystemItem {
@@ -959,14 +911,9 @@ export interface FileSystemListBlobHierarchySegmentHeaders {
}
// @public (undocumented)
-export type FileSystemListDeletedPathsResponse = DeletedPathList & FileSystemListBlobHierarchySegmentHeaders & ListBlobsHierarchySegmentResponse & {
- _response: HttpResponse & {
- bodyAsText: string;
- parsedBody: ListBlobsHierarchySegmentResponse;
- parsedHeaders: FileSystemListBlobHierarchySegmentHeaders;
- };
+export type FileSystemListDeletedPathsResponse = WithResponse<DeletedPathList & FileSystemListBlobHierarchySegmentHeaders & ListBlobsHierarchySegmentResponse, FileSystemListBlobHierarchySegmentHeaders, ListBlobsHierarchySegmentResponse>;
// @public
export interface FileSystemListPathsHeaders {
@@ -980,13 +927,7 @@ export interface FileSystemListPathsHeaders {
}
// @public (undocumented)
-export type FileSystemListPathsResponse = PathList & FileSystemListPathsHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemListPathsHeaders;
- bodyAsText: string;
- parsedBody: PathListModel;
- };
-};
+export type FileSystemListPathsResponse = WithResponse<PathList & FileSystemListPathsHeaders, FileSystemListPathsHeaders, PathListModel>;
// @public (undocumented)
export interface FileSystemProperties {
@@ -1058,11 +999,7 @@ export interface FileSystemSetAccessPolicyOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemSetAccessPolicyResponse = FileSystemSetAccessPolicyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemSetAccessPolicyHeaders;
- };
-};
+export type FileSystemSetAccessPolicyResponse = WithResponse<FileSystemSetAccessPolicyHeaders, FileSystemSetAccessPolicyHeaders>;
// @public (undocumented)
export interface FileSystemSetMetadataHeaders {
@@ -1089,11 +1026,7 @@ export interface FileSystemSetMetadataOptions extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemSetMetadataResponse = FileSystemSetMetadataHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemSetMetadataHeaders;
- };
-};
+export type FileSystemSetMetadataResponse = WithResponse<FileSystemSetMetadataHeaders, FileSystemSetMetadataHeaders>;
// @public (undocumented)
export interface FileSystemUndeletePathOption extends CommonOptions {
@@ -1102,16 +1035,16 @@ export interface FileSystemUndeletePathOption extends CommonOptions {
}
// @public (undocumented)
-export type FileSystemUndeletePathResponse = PathUndeleteHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathUndeleteHeaders;
- };
+export type FileSystemUndeletePathResponse = WithResponse<PathUndeleteHeaders, PathUndeleteHeaders>;
// @public
export type FileSystemUndeleteResponse = ContainerUndeleteResponse;
+// @public (undocumented)
+export type FileUploadResponse = WithResponse<PathFlushDataHeaders, PathFlushDataHeaders>;
+
// @public
export function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters;
@@ -1121,14 +1054,6 @@ export function generateDataLakeSASQueryParameters(dataLakeSASSignatureValues: D
// @public
export function generateDataLakeSASQueryParameters(dataLakeSASSignatureValues: DataLakeSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters;
-export { HttpHeaders }
-
-export { HttpOperationResponse }
-
-export { HttpRequestBody }
-
-export { IHttpClient }
-
export { Lease }
export { LeaseAccessConditions }
@@ -1213,15 +1138,6 @@ export interface ListPathsSegmentOptions extends ListPathsOptions {
maxResults?: number;
}
-// @public
-export type ListPathsSegmentResponse = FileSystemListPathsHeaders & PathListModel & {
- _response: coreHttp.HttpResponse & {
- bodyAsText: string;
- parsedBody: PathListModel;
- parsedHeaders: FileSystemListPathsHeaders;
- };
-};
-
// @public
export const logger: AzureLogger;
@@ -1232,10 +1148,9 @@ export interface Metadata {
}
// @public (undocumented)
-export type ModifiedAccessConditions = Omit;
+export type ModifiedAccessConditions = Omit<ModifiedAccessConditions_3, "tagConditions">;
-// @public
-export function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline;
+export { newPipeline }
// @public (undocumented)
export interface Path {
@@ -1387,12 +1302,8 @@ export interface PathCreateOptions extends CommonOptions {
umask?: string;
}
-// @public
-export type PathCreateResponse = PathCreateHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: PathCreateHeaders;
- };
-};
+// @public (undocumented)
+export type PathCreateResponse = WithResponse<PathCreateHeaders, PathCreateHeaders>;
// @public
export interface PathDeleteHeaders {
@@ -1417,12 +1328,8 @@ export interface PathDeleteOptions extends CommonOptions {
conditions?: DataLakeRequestConditions;
}
-// @public
-export type PathDeleteResponse = PathDeleteHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: PathDeleteHeaders;
- };
-};
+// @public (undocumented)
+export type PathDeleteResponse = WithResponse<PathDeleteHeaders, PathDeleteHeaders>;
// @public
export interface PathExistsOptions extends CommonOptions {
@@ -1444,15 +1351,6 @@ export interface PathFlushDataHeaders {
version?: string;
}
-// @public
-type PathFlushDataResponse = PathFlushDataHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: PathFlushDataHeaders;
- };
-};
-export { PathFlushDataResponse as FileFlushResponse }
-export { PathFlushDataResponse as FileUploadResponse }
-
// @public (undocumented)
export interface PathGetAccessControlHeaders {
// (undocumented)
@@ -1482,11 +1380,7 @@ export interface PathGetAccessControlOptions extends CommonOptions {
}
// @public (undocumented)
-export type PathGetAccessControlResponse = PathAccessControl & PathGetAccessControlHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathGetPropertiesHeadersModel;
- };
-};
+export type PathGetAccessControlResponse = WithResponse<PathAccessControl & PathGetAccessControlHeaders, PathGetPropertiesHeadersModel>;
// @public
export enum PathGetPropertiesAction {
@@ -1609,11 +1503,7 @@ export interface PathGetPropertiesOptions extends CommonOptions {
}
// @public (undocumented)
-export type PathGetPropertiesResponse = PathGetPropertiesHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathGetPropertiesHeaders;
- };
-};
+export type PathGetPropertiesResponse = WithResponse<PathGetPropertiesHeaders, PathGetPropertiesHeaders>;
// @public (undocumented)
export interface PathHttpHeaders {
@@ -1679,11 +1569,7 @@ export interface PathMoveOptions extends CommonOptions {
}
// @public (undocumented)
-export type PathMoveResponse = PathRemoveHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathRemoveHeaders;
- };
-};
+export type PathMoveResponse = WithResponse<PathRemoveHeaders, PathRemoveHeaders>;
// @public (undocumented)
export interface PathPermissions {
@@ -1759,14 +1645,8 @@ export interface PathSetAccessControlOptions extends CommonOptions {
owner?: string;
}
-// @public
-type PathSetAccessControlResponse = PathSetAccessControlHeaders & {
- _response: coreHttp.HttpResponse & {
- parsedHeaders: PathSetAccessControlHeaders;
- };
-};
-export { PathSetAccessControlResponse }
-export { PathSetAccessControlResponse as PathSetPermissionsResponse }
+// @public (undocumented)
+export type PathSetAccessControlResponse = WithResponse<PathSetAccessControlHeaders, PathSetAccessControlHeaders>;
// @public (undocumented)
export interface PathSetHttpHeadersHeaders {
@@ -1793,11 +1673,7 @@ export interface PathSetHttpHeadersOptions extends CommonOptions {
}
// @public (undocumented)
-export type PathSetHttpHeadersResponse = PathSetHttpHeadersHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathSetHttpHeadersHeaders;
- };
-};
+export type PathSetHttpHeadersResponse = WithResponse<PathSetHttpHeadersHeaders, PathSetHttpHeadersHeaders>;
// @public (undocumented)
export interface PathSetMetadataHeaders {
@@ -1829,11 +1705,7 @@ export interface PathSetMetadataOptions extends CommonOptions {
}
// @public (undocumented)
-export type PathSetMetadataResponse = PathSetMetadataHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathSetMetadataHeaders;
- };
-};
+export type PathSetMetadataResponse = WithResponse<PathSetMetadataHeaders, PathSetMetadataHeaders>;
// @public (undocumented)
export interface PathSetPermissionsOptions extends CommonOptions {
@@ -1847,6 +1719,9 @@ export interface PathSetPermissionsOptions extends CommonOptions {
owner?: string;
}
+// @public (undocumented)
+export type PathSetPermissionsResponse = WithResponse<PathSetAccessControlHeaders, PathSetAccessControlHeaders>;
+
// @public
export interface PathUndeleteHeaders {
clientRequestId?: string;
@@ -1877,18 +1752,7 @@ export interface PathUpdateHeaders {
xMsContinuation?: string;
}
-// @public
-export class Pipeline {
- constructor(factories: RequestPolicyFactory[], options?: PipelineOptions);
- readonly factories: RequestPolicyFactory[];
- readonly options: PipelineOptions;
- toServiceClientOptions(): ServiceClientOptions;
-}
-
-// @public
-export interface PipelineOptions {
- httpClient?: IHttpClient;
-}
+export { Pipeline }
// @public (undocumented)
export type PublicAccessType = "filesystem" | "file";
@@ -1910,12 +1774,6 @@ export interface RemovePathAccessControlItem {
entityId?: string;
}
-export { RequestPolicy }
-
-export { RequestPolicyFactory }
-
-export { RequestPolicyOptions }
-
export { RestError }
// @public (undocumented)
@@ -2020,13 +1878,7 @@ export interface ServiceGetUserDelegationKeyOptions extends CommonOptions {
}
// @public (undocumented)
-export type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: ServiceGetUserDelegationKeyHeaders;
- bodyAsText: string;
- parsedBody: UserDelegationKeyModel;
- };
-};
+export type ServiceGetUserDelegationKeyResponse = WithResponse<UserDelegationKey & ServiceGetUserDelegationKeyHeaders, ServiceGetUserDelegationKeyHeaders, UserDelegationKeyModel>;
export { ServiceListContainersSegmentResponse }
@@ -2052,13 +1904,7 @@ export interface ServiceListFileSystemsSegmentHeaders {
}
// @public (undocumented)
-export type ServiceListFileSystemsSegmentResponse = ListFileSystemsSegmentResponse & ServiceListFileSystemsSegmentHeaders & {
- _response: HttpResponse & {
- parsedHeaders: ServiceListFileSystemsSegmentHeaders;
- bodyAsText: string;
- parsedBody: ListFileSystemsSegmentResponse;
- };
-};
+export type ServiceListFileSystemsSegmentResponse = WithResponse<ListFileSystemsSegmentResponse & ServiceListFileSystemsSegmentHeaders, ServiceListFileSystemsSegmentHeaders, ListFileSystemsSegmentResponse>;
// @public
export type ServiceRenameFileSystemOptions = ServiceRenameContainerOptions;
@@ -2078,72 +1924,19 @@ export interface SignedIdentifier {
id: string;
}
-// @public
-export class StorageBrowserPolicy extends BaseRequestPolicy {
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions);
- sendRequest(request: WebResource): Promise;
-}
+export { StorageBrowserPolicy }
-// @public
-export class StorageBrowserPolicyFactory implements RequestPolicyFactory {
- create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy;
-}
+export { StorageBrowserPolicyFactory }
-// @public (undocumented)
-export const StorageOAuthScopes: string | string[];
+export { StoragePipelineOptions }
-// @public
-export interface StoragePipelineOptions {
- httpClient?: IHttpClient;
- keepAliveOptions?: KeepAliveOptions;
- proxyOptions?: ProxyOptions;
- retryOptions?: StorageRetryOptions;
- userAgentOptions?: UserAgentOptions;
-}
+export { StorageRetryPolicy }
-// @public
-export interface StorageRetryOptions {
- readonly maxRetryDelayInMs?: number;
- readonly maxTries?: number;
- readonly retryDelayInMs?: number;
- readonly retryPolicyType?: StorageRetryPolicyType;
- readonly secondaryHost?: string;
- readonly tryTimeoutInMs?: number;
-}
+export { StorageRetryPolicyFactory }
-// @public
-export class StorageRetryPolicy extends BaseRequestPolicy {
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions);
- protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise;
- sendRequest(request: WebResource): Promise;
- protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean;
-}
+export { StorageSharedKeyCredential }
-// @public
-export class StorageRetryPolicyFactory implements RequestPolicyFactory {
- constructor(retryOptions?: StorageRetryOptions);
- create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy;
-}
-
-// @public
-export enum StorageRetryPolicyType {
- EXPONENTIAL = 0,
- FIXED = 1
-}
-
-// @public
-export class StorageSharedKeyCredential extends Credential_2 {
- constructor(accountName: string, accountKey: string);
- readonly accountName: string;
- computeHMACSHA256(stringToSign: string): string;
- create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy;
-}
-
-// @public
-export class StorageSharedKeyCredentialPolicy extends CredentialPolicy {
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential);
- protected signRequest(request: WebResource): WebResource;
-}
+export { StorageSharedKeyCredentialPolicy }
// @public (undocumented)
export const ToBlobEndpointHostMappings: string[][];
@@ -2171,8 +1964,6 @@ export interface UserDelegationKey {
export { UserDelegationKeyModel }
-export { WebResource }
-
// (No @packageDocumentation comment for this package)
```
diff --git a/sdk/storage/storage-file-datalake/src/DataLakeFileSystemClient.ts b/sdk/storage/storage-file-datalake/src/DataLakeFileSystemClient.ts
index 399824d6dc7f..11f08500e5d6 100644
--- a/sdk/storage/storage-file-datalake/src/DataLakeFileSystemClient.ts
+++ b/sdk/storage/storage-file-datalake/src/DataLakeFileSystemClient.ts
@@ -1,14 +1,19 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { TokenCredential } from "@azure/core-http";
+import { TokenCredential } from "@azure/core-auth";
import { PagedAsyncIterableIterator, PageSettings } from "@azure/core-paging";
-import { ContainerClient } from "@azure/storage-blob";
+import {
+ ContainerClient,
+ AnonymousCredential,
+ newPipeline,
+ Pipeline,
+ StoragePipelineOptions,
+} from "@azure/storage-blob";
+import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { SpanStatusCode } from "@azure/core-tracing";
-import { AnonymousCredential } from "./credentials/AnonymousCredential";
-import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { DataLakeLeaseClient } from "./DataLakeLeaseClient";
-import { FileSystem } from "./generated/src/operations";
+import { FileSystemOperationsImpl as FileSystem } from "./generated/src/operations";
import {
AccessPolicy,
FileSystemCreateOptions,
@@ -40,14 +45,15 @@ import {
FileSystemUndeletePathResponse,
FileSystemUndeletePathOption,
ListDeletedPathsSegmentOptions,
+ PathUndeleteHeaders,
} from "./models";
-import { newPipeline, Pipeline, StoragePipelineOptions } from "./Pipeline";
import { StorageClient } from "./StorageClient";
import { toContainerPublicAccessType, toPublicAccessType, toPermissions } from "./transforms";
-import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
+import { createSpan } from "./utils/tracing";
import {
appendToURLPath,
appendToURLQuery,
+ assertResponse,
EscapePath,
windowsFileTimeTicksToTime,
} from "./utils/utils.common";
@@ -600,9 +606,8 @@ export class DataLakeFileSystemClient extends StorageClient {
try {
const rawResponse = await this.fileSystemContext.listPaths(options.recursive || false, {
continuation,
- ...options,
+ ...updatedOptions,
upn: options.userPrincipalName,
- ...convertTracingToRequestOptionsBase(updatedOptions),
});
const response = rawResponse as FileSystemListPathsResponse;
@@ -752,9 +757,8 @@ export class DataLakeFileSystemClient extends StorageClient {
try {
const rawResponse = await this.fileSystemContextToBlobEndpoint.listBlobHierarchySegment({
marker: continuation,
- ...options,
+ ...updatedOptions,
prefix: options.prefix === "" ? undefined : options.prefix,
- ...convertTracingToRequestOptionsBase(updatedOptions),
});
const response = rawResponse as FileSystemListDeletedPathsResponse;
@@ -807,11 +811,13 @@ export class DataLakeFileSystemClient extends StorageClient {
this.pipeline
);
- const rawResponse = await pathClient.blobPathContext.undelete({
- undeleteSource: "?" + DeletionIdKey + "=" + deletionId,
- ...options,
- tracingOptions: updatedOptions.tracingOptions,
- });
+ const rawResponse = assertResponse<PathUndeleteHeaders, PathUndeleteHeaders>(
+ await pathClient.blobPathContext.undelete({
+ undeleteSource: "?" + DeletionIdKey + "=" + deletionId,
+ ...options,
+ tracingOptions: updatedOptions.tracingOptions,
+ })
+ );
if (rawResponse.resourceType === PathResultTypeConstants.DirectoryResourceType) {
return {
diff --git a/sdk/storage/storage-file-datalake/src/DataLakeServiceClient.ts b/sdk/storage/storage-file-datalake/src/DataLakeServiceClient.ts
index e1f419c70d87..08ea2428b868 100644
--- a/sdk/storage/storage-file-datalake/src/DataLakeServiceClient.ts
+++ b/sdk/storage/storage-file-datalake/src/DataLakeServiceClient.ts
@@ -3,17 +3,22 @@
import "@azure/core-paging";
-import { getDefaultProxySettings, isNode, TokenCredential } from "@azure/core-http";
+import { TokenCredential } from "@azure/core-auth";
import { PagedAsyncIterableIterator } from "@azure/core-paging";
+import { getDefaultProxySettings } from "@azure/core-rest-pipeline";
+import { isNode } from "@azure/core-util";
import {
+ AnonymousCredential,
BlobServiceClient,
ServiceGetPropertiesOptions,
ServiceSetPropertiesOptions,
ServiceSetPropertiesResponse,
+ Pipeline,
+ StoragePipelineOptions,
+ newPipeline,
} from "@azure/storage-blob";
-
-import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
+
import { DataLakeFileSystemClient } from "./DataLakeFileSystemClient";
import {
FileSystemItem,
@@ -25,7 +30,6 @@ import {
ServiceUndeleteFileSystemOptions,
FileSystemUndeleteResponse,
} from "./models";
-import { Pipeline, StoragePipelineOptions, newPipeline } from "./Pipeline";
import { StorageClient } from "./StorageClient";
import {
appendToURLPath,
diff --git a/sdk/storage/storage-file-datalake/src/Pipeline.ts b/sdk/storage/storage-file-datalake/src/Pipeline.ts
deleted file mode 100644
index 7d9e27db6ebe..000000000000
--- a/sdk/storage/storage-file-datalake/src/Pipeline.ts
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import {
- BaseRequestPolicy,
- bearerTokenAuthenticationPolicy,
- deserializationPolicy,
- disableResponseDecompressionPolicy,
- generateClientRequestIdPolicy,
- HttpClient as IHttpClient,
- HttpHeaders,
- HttpOperationResponse,
- HttpRequestBody,
- isNode,
- isTokenCredential,
- KeepAliveOptions,
- keepAlivePolicy,
- logPolicy,
- ProxyOptions,
- proxyPolicy,
- RequestPolicy,
- RequestPolicyFactory,
- RequestPolicyOptions,
- ServiceClientOptions,
- TokenCredential,
- tracingPolicy,
- UserAgentOptions,
- WebResource,
-} from "@azure/core-http";
-
-import { AnonymousCredential } from "./credentials/AnonymousCredential";
-import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
-import { logger } from "./log";
-import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory";
-import { StorageRetryOptions, StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory";
-import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory";
-import {
- StorageDataLakeLoggingAllowedHeaderNames,
- StorageDataLakeLoggingAllowedQueryParameters,
- StorageOAuthScopes,
-} from "./utils/constants";
-import { getCachedDefaultHttpClient } from "./utils/cache";
-
-// Export following interfaces and types for customers who want to implement their
-// own RequestPolicy or HTTPClient
-export {
- BaseRequestPolicy,
- StorageOAuthScopes,
- deserializationPolicy,
- IHttpClient,
- HttpHeaders,
- HttpRequestBody,
- HttpOperationResponse,
- WebResource,
- RequestPolicyFactory,
- RequestPolicy,
- RequestPolicyOptions,
-};
-
-/**
- * Option interface for Pipeline constructor.
- */
-export interface PipelineOptions {
- /**
- * Optional. Configures the HTTP client to send requests and receive responses.
- */
- httpClient?: IHttpClient;
-}
-
-/**
- * A Pipeline class containing HTTP request policies.
- * You can create a default Pipeline by calling {@link newPipeline}.
- * Or you can create a Pipeline with your own policies by the constructor of Pipeline.
- *
- * Refer to {@link newPipeline} and provided policies before implementing your
- * customized Pipeline.
- */
-export class Pipeline {
- /**
- * A list of chained request policy factories.
- */
- public readonly factories: RequestPolicyFactory[];
- /**
- * Configures pipeline logger and HTTP client.
- */
- public readonly options: PipelineOptions;
-
- /**
- * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.
- *
- * @param factories -
- * @param options -
- */
- constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {
- this.factories = factories;
- // when options.httpClient is not specified, passing in a DefaultHttpClient instance to
- // avoid each client creating its own http client.
- this.options = {
- ...options,
- httpClient: options.httpClient || getCachedDefaultHttpClient(),
- };
- }
-
- /**
- * Transfer Pipeline object to ServiceClientOptions object which is required by
- * ServiceClient constructor.
- *
- * @returns The ServiceClientOptions object from this Pipeline.
- */
- public toServiceClientOptions(): ServiceClientOptions {
- return {
- httpClient: this.options.httpClient,
- requestPolicyFactories: this.factories,
- };
- }
-}
-
-/**
- * Options interface for the {@link newPipeline} function.
- */
-export interface StoragePipelineOptions {
- /**
- * Options to configure a proxy for outgoing requests.
- */
- proxyOptions?: ProxyOptions;
- /**
- * Options for adding user agent details to outgoing requests.
- */
- userAgentOptions?: UserAgentOptions;
- /**
- * Configures the built-in retry policy behavior.
- */
- retryOptions?: StorageRetryOptions;
- /**
- * Keep alive configurations. Default keep-alive is enabled.
- */
- keepAliveOptions?: KeepAliveOptions;
-
- /**
- * Configures the HTTP client to send requests and receive responses.
- */
- httpClient?: IHttpClient;
-}
-
-/**
- * Creates a new Pipeline object with Credential provided.
- *
- * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
- * @param pipelineOptions - Optional. Options.
- * @returns A new Pipeline object.
- */
-export function newPipeline(
- credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,
- pipelineOptions: StoragePipelineOptions = {}
-): Pipeline {
- if (credential === undefined) {
- credential = new AnonymousCredential();
- }
- // Order is important. Closer to the API at the top & closer to the network at the bottom.
- // The credential's policy factory must appear close to the wire so it can sign any
- // changes made by other factories (like UniqueRequestIDPolicyFactory)
-
- const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);
- const factories: RequestPolicyFactory[] = [
- tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),
- keepAlivePolicy(pipelineOptions.keepAliveOptions),
- telemetryPolicy,
- generateClientRequestIdPolicy(),
- new StorageBrowserPolicyFactory(),
- new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors
- deserializationPolicy(), // Default deserializationPolicy is provided by protocol layer
- logPolicy({
- logger: logger.info,
- allowedHeaderNames: StorageDataLakeLoggingAllowedHeaderNames,
- allowedQueryParameters: StorageDataLakeLoggingAllowedQueryParameters,
- }),
- ];
-
- if (isNode) {
- // policies only available in Node.js runtime, not in browsers
- factories.push(proxyPolicy(pipelineOptions.proxyOptions));
- factories.push(disableResponseDecompressionPolicy());
- }
- factories.push(
- isTokenCredential(credential)
- ? attachCredential(
- bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),
- credential
- )
- : credential
- );
-
- return new Pipeline(factories, pipelineOptions);
-}
-
-/**
- * Attach a TokenCredential to an object.
- *
- * @param thing -
- * @param credential -
- */
-function attachCredential(thing: T, credential: TokenCredential): T {
- (thing as any).credential = credential;
- return thing;
-}
diff --git a/sdk/storage/storage-file-datalake/src/StorageBrowserPolicyFactory.ts b/sdk/storage/storage-file-datalake/src/StorageBrowserPolicyFactory.ts
deleted file mode 100644
index 9a7c4b614ced..000000000000
--- a/sdk/storage/storage-file-datalake/src/StorageBrowserPolicyFactory.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "@azure/core-http";
-import { StorageBrowserPolicy } from "./policies/StorageBrowserPolicy";
-export { StorageBrowserPolicy };
-
-/**
- * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.
- */
-export class StorageBrowserPolicyFactory implements RequestPolicyFactory {
- /**
- * Creates a StorageBrowserPolicyFactory object.
- *
- * @param nextPolicy -
- * @param options -
- */
- public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {
- return new StorageBrowserPolicy(nextPolicy, options);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/StorageClient.ts b/sdk/storage/storage-file-datalake/src/StorageClient.ts
index ddfd6337e51b..17e3be66991e 100644
--- a/sdk/storage/storage-file-datalake/src/StorageClient.ts
+++ b/sdk/storage/storage-file-datalake/src/StorageClient.ts
@@ -1,23 +1,52 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { isNode, isTokenCredential, TokenCredential } from "@azure/core-http";
-import { OperationTracingOptions } from "@azure/core-tracing";
-
-import { AnonymousCredential } from "./credentials/AnonymousCredential";
+import { TokenCredential } from "@azure/core-auth";
+import { StorageContextClient } from "./StorageContextClient";
+import { StorageClient as StorageClientContext } from "./generated/src";
+import {
+ AnonymousCredential,
+ Pipeline,
+ StoragePipelineOptions,
+ BlobServiceClient,
+} from "@azure/storage-blob";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
-import { StorageClientContext } from "./generated/src/storageClientContext";
-import { Pipeline } from "./Pipeline";
import { toBlobEndpointUrl, toDfsEndpointUrl } from "./transforms";
import { escapeURLPath, getAccountNameFromUrl, getURLScheme, iEqual } from "./utils/utils.common";
+import { ExtendedServiceClientOptions } from "@azure/core-http-compat";
+import { HttpClient, Pipeline as CorePipeline } from "@azure/core-rest-pipeline";
+let testOnlyHttpClient: HttpClient | undefined;
/**
- * An interface for options common to every remote operation.
+ * @internal
+ * Set a custom default http client for testing purposes
*/
-export interface CommonOptions {
- /**
- * Options to configure spans created when tracing is enabled.
- */
- tracingOptions?: OperationTracingOptions;
+export function setTestOnlySetHttpClient(httpClient: HttpClient): void {
+ testOnlyHttpClient = httpClient;
+}
+
+// This function relies on the Pipeline already being initialized by a storage-blob client
+function getCoreClientOptions(pipeline: Pipeline): ExtendedServiceClientOptions {
+ const { httpClient: v1Client, ...restOptions } = pipeline.options as StoragePipelineOptions;
+ let httpClient: HttpClient = (pipeline as any)._coreHttpClient;
+ if (!httpClient) {
+ throw new Error("Pipeline not correctly initialized; missing V2 HttpClient");
+ }
+
+ // check if we're running in a browser test mode and use the xhr client
+ if (testOnlyHttpClient && httpClient !== testOnlyHttpClient) {
+ httpClient = testOnlyHttpClient;
+ (pipeline as any)._coreHttpClient = testOnlyHttpClient;
+ }
+
+ const corePipeline: CorePipeline = (pipeline as any)._corePipeline;
+ if (!corePipeline) {
+ throw new Error("Pipeline not correctly initialized; missing V2 Pipeline");
+ }
+ return {
+ ...restOptions,
+ httpClient,
+ pipeline: corePipeline,
+ };
}
/**
@@ -82,30 +111,21 @@ export abstract class StorageClient {
this.dfsEndpointUrl = toDfsEndpointUrl(this.url);
this.accountName = getAccountNameFromUrl(this.blobEndpointUrl);
this.pipeline = pipeline;
- this.storageClientContext = new StorageClientContext(
+ // creating this BlobServiceClient allows us to use the converted V2 Pipeline attached to `pipeline`.
+ const blobClient = new BlobServiceClient(url, pipeline);
+ this.storageClientContext = new StorageContextClient(
this.dfsEndpointUrl,
- pipeline.toServiceClientOptions()
+ getCoreClientOptions(pipeline)
);
- this.storageClientContextToBlobEndpoint = new StorageClientContext(
+
+ this.storageClientContextToBlobEndpoint = new StorageContextClient(
this.blobEndpointUrl,
- pipeline.toServiceClientOptions()
+ getCoreClientOptions(pipeline)
);
this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
- this.credential = new AnonymousCredential();
- for (const factory of this.pipeline.factories) {
- if (
- (isNode && factory instanceof StorageSharedKeyCredential) ||
- factory instanceof AnonymousCredential
- ) {
- this.credential = factory;
- } else if (isTokenCredential((factory as any).credential)) {
- // Only works if the factory has been attached a "credential" property.
- // We do that in newPipeline() when using TokenCredential.
- this.credential = (factory as any).credential;
- }
- }
+ this.credential = blobClient.credential;
// Override protocol layer's default content-type
const storageClientContext = this.storageClientContext as any;
diff --git a/sdk/storage/storage-file-datalake/src/StorageContextClient.ts b/sdk/storage/storage-file-datalake/src/StorageContextClient.ts
new file mode 100644
index 000000000000..fb7ef16e93ec
--- /dev/null
+++ b/sdk/storage/storage-file-datalake/src/StorageContextClient.ts
@@ -0,0 +1,47 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+import { OperationArguments, OperationSpec } from "@azure/core-client";
+import { isNode } from "@azure/core-util";
+import { StorageClient } from "./generated/src";
+
+/**
+ * @internal
+ */
+export class StorageContextClient extends StorageClient {
+ async sendOperationRequest(
+ operationArguments: OperationArguments,
+ operationSpec: OperationSpec
+ ): Promise {
+ const operationSpecToSend = { ...operationSpec };
+
+ if (
+ !isNode &&
+ !operationSpec.requestBody &&
+ operationSpec.headerParameters?.some(
+ (param) => param.mapper.serializedName === "Content-Length"
+ )
+ ) {
+ operationSpecToSend.mediaType = "text";
+ operationSpecToSend.requestBody = {
+ parameterPath: "body",
+ mapper: {
+ serializedName: "body",
+ isConstant: true,
+ defaultValue: "",
+ type: {
+ name: "String",
+ },
+ },
+ };
+ }
+
+ if (
+ operationSpecToSend.path === "/{filesystem}" ||
+ operationSpecToSend.path === "/{filesystem}/{path}"
+ ) {
+ operationSpecToSend.path = "";
+ }
+ return super.sendOperationRequest(operationArguments, operationSpecToSend);
+ }
+}
diff --git a/sdk/storage/storage-file-datalake/src/StorageRetryPolicyFactory.ts b/sdk/storage/storage-file-datalake/src/StorageRetryPolicyFactory.ts
deleted file mode 100644
index fbd217bf7048..000000000000
--- a/sdk/storage/storage-file-datalake/src/StorageRetryPolicyFactory.ts
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "@azure/core-http";
-import { StorageRetryPolicy, StorageRetryPolicyType } from "./policies/StorageRetryPolicy";
-
-export { StorageRetryPolicyType, StorageRetryPolicy };
-
-/**
- * Storage Blob retry options interface.
- */
-export interface StorageRetryOptions {
- /**
- * Optional. StorageRetryPolicyType, default is exponential retry policy.
- */
- readonly retryPolicyType?: StorageRetryPolicyType;
-
- /**
- * Optional. Max try number of attempts, default is 4.
- * A value of 1 means 1 try and no retries.
- * A value smaller than 1 means default retry number of attempts.
- */
- readonly maxTries?: number;
-
- /**
- * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.
- * A value of zero or undefined means no default timeout on SDK client, Azure
- * Storage server's default timeout policy will be used.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations
- */
- readonly tryTimeoutInMs?: number;
-
- /**
- * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).
- * The delay increases (exponentially or linearly) with each retry up to a maximum specified by
- * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.
- */
- readonly retryDelayInMs?: number;
-
- /**
- * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).
- * If you specify 0, then you must also specify 0 for retryDelayInMs.
- */
- readonly maxRetryDelayInMs?: number;
-
- /**
- * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined
- * (the default) then operations are not retried against another host.
- *
- * NOTE: Before setting this field, make sure you understand the issues around
- * reading stale and potentially-inconsistent data at
- * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}
- */
- readonly secondaryHost?: string;
-}
-
-/**
- * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.
- */
-export class StorageRetryPolicyFactory implements RequestPolicyFactory {
- private retryOptions?: StorageRetryOptions;
-
- /**
- * Creates an instance of StorageRetryPolicyFactory.
- * @param retryOptions -
- */
- constructor(retryOptions?: StorageRetryOptions) {
- this.retryOptions = retryOptions;
- }
-
- /**
- * Creates a StorageRetryPolicy object.
- *
- * @param nextPolicy -
- * @param options -
- */
- public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {
- return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/TelemetryPolicyFactory.ts b/sdk/storage/storage-file-datalake/src/TelemetryPolicyFactory.ts
deleted file mode 100644
index eb6829e5c01c..000000000000
--- a/sdk/storage/storage-file-datalake/src/TelemetryPolicyFactory.ts
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import {
- isNode,
- RequestPolicy,
- RequestPolicyFactory,
- RequestPolicyOptions,
- UserAgentOptions,
-} from "@azure/core-http";
-import * as os from "os";
-
-import { TelemetryPolicy } from "./policies/TelemetryPolicy";
-import { SDK_VERSION } from "./utils/constants";
-
-/**
- * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.
- */
-export class TelemetryPolicyFactory implements RequestPolicyFactory {
- /**
- * @internal
- */
- public readonly telemetryString: string;
-
- /**
- * Creates an instance of TelemetryPolicyFactory.
- * @param telemetry -
- */
- constructor(telemetry?: UserAgentOptions) {
- const userAgentInfo: string[] = [];
-
- if (isNode) {
- if (telemetry) {
- const telemetryString = telemetry.userAgentPrefix || "";
- if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {
- userAgentInfo.push(telemetryString);
- }
- }
-
- // e.g. azsdk-js-storagedatalake/10.0.0
- const libInfo = `azsdk-js-storagedatalake/${SDK_VERSION}`;
- if (userAgentInfo.indexOf(libInfo) === -1) {
- userAgentInfo.push(libInfo);
- }
-
- // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)
- let runtimeInfo = `(NODE-VERSION ${process.version})`;
- if (os) {
- runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;
- }
- if (userAgentInfo.indexOf(runtimeInfo) === -1) {
- userAgentInfo.push(runtimeInfo);
- }
- }
-
- this.telemetryString = userAgentInfo.join(" ");
- }
-
- /**
- * Creates a TelemetryPolicy object.
- *
- * @param nextPolicy -
- * @param options -
- */
- public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {
- return new TelemetryPolicy(nextPolicy, options, this.telemetryString);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/clients.ts b/sdk/storage/storage-file-datalake/src/clients.ts
index 0ac6cd79372e..602f49f58f4a 100644
--- a/sdk/storage/storage-file-datalake/src/clients.ts
+++ b/sdk/storage/storage-file-datalake/src/clients.ts
@@ -1,15 +1,23 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { HttpRequestBody, isNode, TokenCredential } from "@azure/core-http";
-import { BlobClient, BlockBlobClient } from "@azure/storage-blob";
+import { TokenCredential } from "@azure/core-auth";
+import { RequestBodyType as HttpRequestBody } from "@azure/core-rest-pipeline";
+import { isNode } from "@azure/core-util";
+import {
+ AnonymousCredential,
+ BlobClient,
+ BlockBlobClient,
+ newPipeline,
+ Pipeline,
+ StoragePipelineOptions,
+} from "@azure/storage-blob";
+import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { SpanStatusCode } from "@azure/core-tracing";
import { Readable } from "stream";
import { BufferScheduler } from "../../storage-common/src";
-import { AnonymousCredential } from "./credentials/AnonymousCredential";
-import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { DataLakeLeaseClient } from "./DataLakeLeaseClient";
-import { Path } from "./generated/src/operations";
+import { PathOperationsImpl as Path } from "./generated/src/operations";
import {
AccessControlChanges,
DirectoryCreateIfNotExistsOptions,
@@ -67,14 +75,14 @@ import {
RemovePathAccessControlItem,
} from "./models";
import { PathSetAccessControlRecursiveMode } from "./models.internal";
-import { newPipeline, Pipeline, StoragePipelineOptions } from "./Pipeline";
import { generateDataLakeSASQueryParameters } from "./sas/DataLakeSASSignatureValues";
import { StorageClient } from "./StorageClient";
import {
toAccessControlChangeFailureArray,
+ toAcl,
toAclString,
toBlobCpkInfo,
- toPathGetAccessControlResponse,
+ toPermissions,
toPermissionsString,
toProperties,
} from "./transforms";
@@ -89,16 +97,26 @@ import {
FILE_UPLOAD_MAX_CHUNK_SIZE,
} from "./utils/constants";
import { DataLakeAclChangeFailedError } from "./utils/DataLakeAclChangeFailedError";
-import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
+import { createSpan } from "./utils/tracing";
import {
appendToURLPath,
appendToURLQuery,
+ assertResponse,
ensureCpkIfSpecified,
getURLPathAndQuery,
setURLPath,
setURLQueries,
} from "./utils/utils.common";
import { fsCreateReadStream, fsStat } from "./utils/utils.node";
+import {
+ PathAppendDataHeaders,
+ PathCreateHeaders,
+ PathDeleteHeaders,
+ PathFlushDataHeaders,
+ PathGetPropertiesHeaders,
+ PathSetAccessControlHeaders,
+ PathSetExpiryHeaders,
+} from "./generated/src";
/**
* A DataLakePathClient represents a URL to the Azure Storage path (directory or file).
@@ -158,12 +176,11 @@ export class DataLakePathClient extends StorageClient {
let response;
try {
response = await this.pathContext.setAccessControlRecursive(mode, {
- ...options,
+ ...updatedOptions,
acl: toAclString(acl as PathAccessControlItem[]),
maxRecords: options.batchSize,
continuation: continuationToken,
forceFlag: options.continueOnFailure,
- ...convertTracingToRequestOptionsBase(updatedOptions),
});
} catch (e: any) {
throw new DataLakeAclChangeFailedError(e, continuationToken);
@@ -345,18 +362,19 @@ export class DataLakePathClient extends StorageClient {
}
}
- return await this.pathContext.create({
- ...options,
- resource: resourceType,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- properties: toProperties(options.metadata),
- cpkInfo: options.customerProvidedKey,
- acl: options.acl ? toAclString(options.acl) : undefined,
- expiryOptions: expiryOptions,
- expiresOn: expiresOn,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- });
+ return assertResponse(
+ await this.pathContext.create({
+ ...updatedOptions,
+ resource: resourceType,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ properties: toProperties(options.metadata),
+ cpkInfo: options.customerProvidedKey,
+ acl: options.acl ? toAclString(options.acl) : undefined,
+ expiryOptions: expiryOptions,
+ expiresOn: expiresOn,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -456,19 +474,21 @@ export class DataLakePathClient extends StorageClient {
options.conditions = options.conditions || {};
const { span, updatedOptions } = createSpan("DataLakePathClient-delete", options);
try {
- let continuation;
- let response;
+ let continuation: string | undefined;
+ let response: PathDeleteResponse;
// How to handle long delete loop?
do {
- response = await this.pathContext.delete({
- continuation,
- recursive,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- abortSignal: options.abortSignal,
- });
+ response = assertResponse(
+ await this.pathContext.delete({
+ ...updatedOptions,
+ continuation,
+ recursive,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ abortSignal: options.abortSignal,
+ })
+ );
continuation = response.continuation;
} while (continuation !== undefined && continuation !== "");
@@ -539,15 +559,22 @@ export class DataLakePathClient extends StorageClient {
options.conditions = options.conditions || {};
const { span, updatedOptions } = createSpan("DataLakePathClient-getAccessControl", options);
try {
- const response = await this.pathContext.getProperties({
- action: "getAccessControl",
- upn: options.userPrincipalName,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- abortSignal: options.abortSignal,
- });
- return toPathGetAccessControlResponse(response);
+ const response = assertResponse(
+ await this.pathContext.getProperties({
+ ...updatedOptions,
+ action: "getAccessControl",
+ upn: options.userPrincipalName,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ abortSignal: options.abortSignal,
+ })
+ );
+ return {
+ ...response,
+ _response: response._response,
+ permissions: toPermissions(response.permissions),
+ acl: toAcl(response.acl),
+ };
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -574,13 +601,14 @@ export class DataLakePathClient extends StorageClient {
options.conditions = options.conditions || {};
const { span, updatedOptions } = createSpan("DataLakePathClient-setAccessControl", options);
try {
- return await this.pathContext.setAccessControl({
- ...options,
- acl: toAclString(acl),
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- });
+ return assertResponse(
+ await this.pathContext.setAccessControl({
+ ...updatedOptions,
+ acl: toAclString(acl),
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -694,13 +722,14 @@ export class DataLakePathClient extends StorageClient {
options.conditions = options.conditions || {};
const { span, updatedOptions } = createSpan("DataLakePathClient-setPermissions", options);
try {
- return await this.pathContext.setAccessControl({
- ...options,
- permissions: toPermissionsString(permissions),
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- });
+ return assertResponse(
+ await this.pathContext.setAccessControl({
+ ...updatedOptions,
+ permissions: toPermissionsString(permissions),
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -884,21 +913,23 @@ export class DataLakePathClient extends StorageClient {
const destPathClient = new DataLakePathClient(destinationUrl, this.pipeline);
try {
- return await destPathClient.pathContext.create({
- mode: "legacy", // By default
- renameSource,
- sourceLeaseId: options.conditions.leaseId,
- leaseAccessConditions: options.destinationConditions,
- sourceModifiedAccessConditions: {
- sourceIfMatch: options.conditions.ifMatch,
- sourceIfNoneMatch: options.conditions.ifNoneMatch,
- sourceIfModifiedSince: options.conditions.ifModifiedSince,
- sourceIfUnmodifiedSince: options.conditions.ifUnmodifiedSince,
- },
- modifiedAccessConditions: options.destinationConditions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- abortSignal: options.abortSignal,
- });
+ return assertResponse(
+ await destPathClient.pathContext.create({
+ ...updatedOptions,
+ mode: "legacy", // By default
+ renameSource,
+ sourceLeaseId: options.conditions.leaseId,
+ leaseAccessConditions: options.destinationConditions,
+ sourceModifiedAccessConditions: {
+ sourceIfMatch: options.conditions.ifMatch,
+ sourceIfNoneMatch: options.conditions.ifNoneMatch,
+ sourceIfModifiedSince: options.conditions.ifModifiedSince,
+ sourceIfUnmodifiedSince: options.conditions.ifUnmodifiedSince,
+ },
+ modifiedAccessConditions: options.destinationConditions,
+ abortSignal: options.abortSignal,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -957,11 +988,7 @@ export class DataLakeDirectoryClient extends DataLakePathClient {
const { span, updatedOptions } = createSpan("DataLakeDirectoryClient-create", options);
try {
return await super.create("directory", {
- ...options,
- tracingOptions: {
- ...options.tracingOptions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- },
+ ...updatedOptions,
});
} catch (e: any) {
span.setStatus({
@@ -1018,11 +1045,7 @@ export class DataLakeDirectoryClient extends DataLakePathClient {
);
try {
return await super.createIfNotExists("directory", {
- ...options,
- tracingOptions: {
- ...options.tracingOptions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- },
+ ...updatedOptions,
});
} catch (e: any) {
span.setStatus({
@@ -1213,11 +1236,7 @@ export class DataLakeFileClient extends DataLakePathClient {
const { span, updatedOptions } = createSpan("DataLakeFileClient-create", options);
try {
return await super.create("file", {
- ...options,
- tracingOptions: {
- ...options.tracingOptions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- },
+ ...updatedOptions,
});
} catch (e: any) {
span.setStatus({
@@ -1271,11 +1290,7 @@ export class DataLakeFileClient extends DataLakePathClient {
const { span, updatedOptions } = createSpan("DataLakeFileClient-createIfNotExists", options);
try {
return await super.createIfNotExists("file", {
- ...options,
- tracingOptions: {
- ...options.tracingOptions,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- },
+ ...updatedOptions,
});
} catch (e: any) {
span.setStatus({
@@ -1397,24 +1412,26 @@ export class DataLakeFileClient extends DataLakePathClient {
const { span, updatedOptions } = createSpan("DataLakeFileClient-append", options);
try {
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return await this.pathContextInternal.appendData(body, {
- pathHttpHeaders: {
- contentMD5: options.transactionalContentMD5,
- },
- abortSignal: options.abortSignal,
- position: offset,
- contentLength: length,
- leaseAccessConditions: options.conditions,
- requestOptions: {
- onUploadProgress: options.onProgress,
- },
- cpkInfo: options.customerProvidedKey,
- flush: options.flush,
- proposedLeaseId: options.proposedLeaseId,
- leaseDuration: options.leaseDuration,
- leaseAction: options.leaseAction,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- });
+ return assertResponse(
+ await this.pathContextInternal.appendData(body, {
+ ...updatedOptions,
+ pathHttpHeaders: {
+ contentMD5: options.transactionalContentMD5,
+ },
+ abortSignal: options.abortSignal,
+ position: offset,
+ contentLength: length,
+ leaseAccessConditions: options.conditions,
+ requestOptions: {
+ onUploadProgress: options.onProgress,
+ },
+ cpkInfo: options.customerProvidedKey,
+ flush: options.flush,
+ proposedLeaseId: options.proposedLeaseId,
+ leaseDuration: options.leaseDuration,
+ leaseAction: options.leaseAction,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -1443,18 +1460,19 @@ export class DataLakeFileClient extends DataLakePathClient {
const { span, updatedOptions } = createSpan("DataLakeFileClient-flush", options);
try {
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return await this.pathContextInternal.flushData({
- ...options,
- position,
- contentLength: 0,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
- cpkInfo: options.customerProvidedKey,
- proposedLeaseId: options.proposedLeaseId,
- leaseDuration: options.leaseDuration,
- leaseAction: options.leaseAction,
- ...convertTracingToRequestOptionsBase(updatedOptions),
- });
+ return assertResponse(
+ await this.pathContextInternal.flushData({
+ ...updatedOptions,
+ position,
+ contentLength: 0,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ cpkInfo: options.customerProvidedKey,
+ proposedLeaseId: options.proposedLeaseId,
+ leaseDuration: options.leaseDuration,
+ leaseAction: options.leaseAction,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
@@ -2003,10 +2021,12 @@ export class DataLakeFileClient extends DataLakePathClient {
}
const adaptedOptions = { ...options, expiresOn };
- return await this.pathContextInternalToBlobEndpoint.setExpiry(mode, {
- ...adaptedOptions,
- tracingOptions: updatedOptions.tracingOptions,
- });
+ return assertResponse(
+ await this.pathContextInternalToBlobEndpoint.setExpiry(mode, {
+ ...adaptedOptions,
+ tracingOptions: updatedOptions.tracingOptions,
+ })
+ );
} catch (e: any) {
span.setStatus({
code: SpanStatusCode.ERROR,
diff --git a/sdk/storage/storage-file-datalake/src/credentials/AnonymousCredential.ts b/sdk/storage/storage-file-datalake/src/credentials/AnonymousCredential.ts
deleted file mode 100644
index c648c39b932b..000000000000
--- a/sdk/storage/storage-file-datalake/src/credentials/AnonymousCredential.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyOptions } from "@azure/core-http";
-
-import { AnonymousCredentialPolicy } from "../policies/AnonymousCredentialPolicy";
-import { Credential } from "./Credential";
-
-/**
- * AnonymousCredential provides a credentialPolicyCreator member used to create
- * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with
- * HTTP(S) requests that read public resources or for use with Shared Access
- * Signatures (SAS).
- */
-export class AnonymousCredential extends Credential {
- /**
- * Creates an {@link AnonymousCredentialPolicy} object.
- *
- * @param nextPolicy -
- * @param options -
- */
- public create(
- nextPolicy: RequestPolicy,
- options: RequestPolicyOptions
- ): AnonymousCredentialPolicy {
- return new AnonymousCredentialPolicy(nextPolicy, options);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/credentials/Credential.ts b/sdk/storage/storage-file-datalake/src/credentials/Credential.ts
deleted file mode 100644
index bd795dcd129d..000000000000
--- a/sdk/storage/storage-file-datalake/src/credentials/Credential.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "@azure/core-http";
-import { CredentialPolicy } from "../policies/CredentialPolicy";
-
-/**
- * Credential is an abstract class for Azure Storage HTTP requests signing. This
- * class will host an credentialPolicyCreator factory which generates CredentialPolicy.
- */
-export abstract class Credential implements RequestPolicyFactory {
- /**
- * Creates a RequestPolicy object.
- *
- * @param _nextPolicy -
- * @param _options -
- */
- public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {
- throw new Error("Method should be implemented in children classes.");
- }
-}
-
-/**
- * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.
- */
-export type CredentialPolicyCreator = (
- nextPolicy: RequestPolicy,
- options: RequestPolicyOptions
-) => CredentialPolicy;
diff --git a/sdk/storage/storage-file-datalake/src/credentials/StorageSharedKeyCredential.ts b/sdk/storage/storage-file-datalake/src/credentials/StorageSharedKeyCredential.ts
index 2e68263d05d9..daa3ee4fa5f0 100644
--- a/sdk/storage/storage-file-datalake/src/credentials/StorageSharedKeyCredential.ts
+++ b/sdk/storage/storage-file-datalake/src/credentials/StorageSharedKeyCredential.ts
@@ -1,58 +1,4 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { createHmac } from "crypto";
-import { RequestPolicy, RequestPolicyOptions } from "@azure/core-http";
-
-import { StorageSharedKeyCredentialPolicy } from "../policies/StorageSharedKeyCredentialPolicy";
-import { Credential } from "./Credential";
-
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * StorageSharedKeyCredential for account key authorization of Azure Storage service.
- */
-export class StorageSharedKeyCredential extends Credential {
- /**
- * Azure Storage account name; readonly.
- */
- public readonly accountName: string;
-
- /**
- * Azure Storage account key; readonly.
- */
- private readonly accountKey: Buffer;
-
- /**
- * Creates an instance of StorageSharedKeyCredential.
- * @param accountName -
- * @param accountKey -
- */
- constructor(accountName: string, accountKey: string) {
- super();
- this.accountName = accountName;
- this.accountKey = Buffer.from(accountKey, "base64");
- }
-
- /**
- * Creates a StorageSharedKeyCredentialPolicy object.
- *
- * @param nextPolicy -
- * @param options -
- */
- public create(
- nextPolicy: RequestPolicy,
- options: RequestPolicyOptions
- ): StorageSharedKeyCredentialPolicy {
- return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);
- }
-
- /**
- * Generates a hash signature for an HTTP request or for a SAS.
- *
- * @param stringToSign -
- */
- public computeHMACSHA256(stringToSign: string): string {
- return createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64");
- }
-}
+export { StorageSharedKeyCredential } from "@azure/storage-blob";
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/index.ts b/sdk/storage/storage-file-datalake/src/generated/src/index.ts
index 40fd6f83562e..db8d68b5328c 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/index.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/index.ts
@@ -8,4 +8,4 @@
export * from "./models";
export { StorageClient } from "./storageClient";
-export { StorageClientContext } from "./storageClientContext";
+export * from "./operationsInterfaces";
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/models/index.ts b/sdk/storage/storage-file-datalake/src/generated/src/models/index.ts
index ba942fce24aa..2d70d5e6ffa0 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/models/index.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/models/index.ts
@@ -6,7 +6,8 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import * as coreHttp from "@azure/core-http";
+import * as coreClient from "@azure/core-client";
+import * as coreHttpCompat from "@azure/core-http-compat";
export interface FileSystemList {
filesystems?: FileSystem[];
@@ -714,8 +715,10 @@ export interface CpkInfo {
}
/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */
-export const enum KnownEncryptionAlgorithmType {
+export enum KnownEncryptionAlgorithmType {
+ /** None */
None = "None",
+ /** AES256 */
AES256 = "AES256"
}
@@ -723,7 +726,7 @@ export const enum KnownEncryptionAlgorithmType {
* Defines values for EncryptionAlgorithmType. \
* {@link KnownEncryptionAlgorithmType} can be used interchangeably with EncryptionAlgorithmType,
* this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
* **None** \
* **AES256**
*/
@@ -774,7 +777,7 @@ export type LeaseAction =
/** Optional parameters. */
export interface ServiceListFileSystemsOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Filters results to filesystems within the specified prefix. */
prefix?: string;
/** Optional. When deleting a directory, the number of paths that are deleted with each invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the delete operation to continue deleting the directory. */
@@ -789,22 +792,11 @@ export interface ServiceListFileSystemsOptionalParams
/** Contains response data for the listFileSystems operation. */
export type ServiceListFileSystemsResponse = ServiceListFileSystemsHeaders &
- FileSystemList & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: FileSystemList;
- /** The parsed HTTP response headers. */
- parsedHeaders: ServiceListFileSystemsHeaders;
- };
- };
+ FileSystemList;
/** Optional parameters. */
export interface FileSystemCreateOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */
@@ -814,17 +806,11 @@ export interface FileSystemCreateOptionalParams
}
/** Contains response data for the create operation. */
-export type FileSystemCreateResponse = FileSystemCreateHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemCreateHeaders;
- };
-};
+export type FileSystemCreateResponse = FileSystemCreateHeaders;
/** Optional parameters. */
export interface FileSystemSetPropertiesOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
@@ -836,17 +822,11 @@ export interface FileSystemSetPropertiesOptionalParams
}
/** Contains response data for the setProperties operation. */
-export type FileSystemSetPropertiesResponse = FileSystemSetPropertiesHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemSetPropertiesHeaders;
- };
-};
+export type FileSystemSetPropertiesResponse = FileSystemSetPropertiesHeaders;
/** Optional parameters. */
export interface FileSystemGetPropertiesOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */
@@ -854,17 +834,11 @@ export interface FileSystemGetPropertiesOptionalParams
}
/** Contains response data for the getProperties operation. */
-export type FileSystemGetPropertiesResponse = FileSystemGetPropertiesHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemGetPropertiesHeaders;
- };
-};
+export type FileSystemGetPropertiesResponse = FileSystemGetPropertiesHeaders;
/** Optional parameters. */
export interface FileSystemDeleteOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
@@ -874,17 +848,11 @@ export interface FileSystemDeleteOptionalParams
}
/** Contains response data for the delete operation. */
-export type FileSystemDeleteResponse = FileSystemDeleteHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemDeleteHeaders;
- };
-};
+export type FileSystemDeleteResponse = FileSystemDeleteHeaders;
/** Optional parameters. */
export interface FileSystemListPathsOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Optional. When deleting a directory, the number of paths that are deleted with each invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the delete operation to continue deleting the directory. */
continuation?: string;
/** An optional value that specifies the maximum number of items to return. If omitted or greater than 5,000, the response will include up to 5,000 items. */
@@ -900,23 +868,11 @@ export interface FileSystemListPathsOptionalParams
}
/** Contains response data for the listPaths operation. */
-export type FileSystemListPathsResponse = FileSystemListPathsHeaders &
- PathList & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: PathList;
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemListPathsHeaders;
- };
- };
+export type FileSystemListPathsResponse = FileSystemListPathsHeaders & PathList;
/** Optional parameters. */
export interface FileSystemListBlobHierarchySegmentOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Filters results to filesystems within the specified prefix. */
prefix?: string;
/** An optional value that specifies the maximum number of items to return. If omitted or greater than 5,000, the response will include up to 5,000 items. */
@@ -935,21 +891,10 @@ export interface FileSystemListBlobHierarchySegmentOptionalParams
/** Contains response data for the listBlobHierarchySegment operation. */
export type FileSystemListBlobHierarchySegmentResponse = FileSystemListBlobHierarchySegmentHeaders &
- ListBlobsHierarchySegmentResponse & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: ListBlobsHierarchySegmentResponse;
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemListBlobHierarchySegmentHeaders;
- };
- };
+ ListBlobsHierarchySegmentResponse;
/** Optional parameters. */
-export interface PathCreateOptionalParams extends coreHttp.OperationOptions {
+export interface PathCreateOptionalParams extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -997,16 +942,10 @@ export interface PathCreateOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the create operation. */
-export type PathCreateResponse = PathCreateHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathCreateHeaders;
- };
-};
+export type PathCreateResponse = PathCreateHeaders;
/** Optional parameters. */
-export interface PathUpdateOptionalParams extends coreHttp.OperationOptions {
+export interface PathUpdateOptionalParams extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1045,21 +984,10 @@ export interface PathUpdateOptionalParams extends coreHttp.OperationOptions {
/** Contains response data for the update operation. */
export type PathUpdateResponse = PathUpdateHeaders &
- SetAccessControlRecursiveResponse & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: SetAccessControlRecursiveResponse;
- /** The parsed HTTP response headers. */
- parsedHeaders: PathUpdateHeaders;
- };
- };
+ SetAccessControlRecursiveResponse;
/** Optional parameters. */
-export interface PathLeaseOptionalParams extends coreHttp.OperationOptions {
+export interface PathLeaseOptionalParams extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1075,16 +1003,10 @@ export interface PathLeaseOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the lease operation. */
-export type PathLeaseResponse = PathLeaseHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathLeaseHeaders;
- };
-};
+export type PathLeaseResponse = PathLeaseHeaders;
/** Optional parameters. */
-export interface PathReadOptionalParams extends coreHttp.OperationOptions {
+export interface PathReadOptionalParams extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1117,17 +1039,11 @@ export type PathReadResponse = PathReadHeaders & {
* Always `undefined` in the browser.
*/
readableStreamBody?: NodeJS.ReadableStream;
-
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathReadHeaders;
- };
};
/** Optional parameters. */
export interface PathGetPropertiesOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1143,16 +1059,10 @@ export interface PathGetPropertiesOptionalParams
}
/** Contains response data for the getProperties operation. */
-export type PathGetPropertiesResponse = PathGetPropertiesHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathGetPropertiesHeaders;
- };
-};
+export type PathGetPropertiesResponse = PathGetPropertiesHeaders;
/** Optional parameters. */
-export interface PathDeleteOptionalParams extends coreHttp.OperationOptions {
+export interface PathDeleteOptionalParams extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1168,17 +1078,11 @@ export interface PathDeleteOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the delete operation. */
-export type PathDeleteResponse = PathDeleteHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathDeleteHeaders;
- };
-};
+export type PathDeleteResponse = PathDeleteHeaders;
/** Optional parameters. */
export interface PathSetAccessControlOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1198,17 +1102,11 @@ export interface PathSetAccessControlOptionalParams
}
/** Contains response data for the setAccessControl operation. */
-export type PathSetAccessControlResponse = PathSetAccessControlHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathSetAccessControlHeaders;
- };
-};
+export type PathSetAccessControlResponse = PathSetAccessControlHeaders;
/** Optional parameters. */
export interface PathSetAccessControlRecursiveOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Optional. When deleting a directory, the number of paths that are deleted with each invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the delete operation to continue deleting the directory. */
continuation?: string;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
@@ -1225,21 +1123,11 @@ export interface PathSetAccessControlRecursiveOptionalParams
/** Contains response data for the setAccessControlRecursive operation. */
export type PathSetAccessControlRecursiveResponse = PathSetAccessControlRecursiveHeaders &
- SetAccessControlRecursiveResponse & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: SetAccessControlRecursiveResponse;
- /** The parsed HTTP response headers. */
- parsedHeaders: PathSetAccessControlRecursiveHeaders;
- };
- };
+ SetAccessControlRecursiveResponse;
/** Optional parameters. */
-export interface PathFlushDataOptionalParams extends coreHttp.OperationOptions {
+export interface PathFlushDataOptionalParams
+ extends coreClient.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
@@ -1269,17 +1157,11 @@ export interface PathFlushDataOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the flushData operation. */
-export type PathFlushDataResponse = PathFlushDataHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathFlushDataHeaders;
- };
-};
+export type PathFlushDataResponse = PathFlushDataHeaders;
/** Optional parameters. */
export interface PathAppendDataOptionalParams
- extends coreHttp.OperationOptions {
+ extends coreClient.OperationOptions {
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Parameter group */
@@ -1307,16 +1189,11 @@ export interface PathAppendDataOptionalParams
}
/** Contains response data for the appendData operation. */
-export type PathAppendDataResponse = PathAppendDataHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathAppendDataHeaders;
- };
-};
+export type PathAppendDataResponse = PathAppendDataHeaders;
/** Optional parameters. */
-export interface PathSetExpiryOptionalParams extends coreHttp.OperationOptions {
+export interface PathSetExpiryOptionalParams
+ extends coreClient.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */
@@ -1326,16 +1203,11 @@ export interface PathSetExpiryOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the setExpiry operation. */
-export type PathSetExpiryResponse = PathSetExpiryHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathSetExpiryHeaders;
- };
-};
+export type PathSetExpiryResponse = PathSetExpiryHeaders;
/** Optional parameters. */
-export interface PathUndeleteOptionalParams extends coreHttp.OperationOptions {
+export interface PathUndeleteOptionalParams
+ extends coreClient.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */
@@ -1345,17 +1217,11 @@ export interface PathUndeleteOptionalParams extends coreHttp.OperationOptions {
}
/** Contains response data for the undelete operation. */
-export type PathUndeleteResponse = PathUndeleteHeaders & {
- /** The underlying HTTP response. */
- _response: coreHttp.HttpResponse & {
- /** The parsed HTTP response headers. */
- parsedHeaders: PathUndeleteHeaders;
- };
-};
+export type PathUndeleteResponse = PathUndeleteHeaders;
/** Optional parameters. */
export interface StorageClientOptionalParams
- extends coreHttp.ServiceClientOptions {
+ extends coreHttpCompat.ExtendedServiceClientOptions {
/** Specifies the version of the operation to use for this request. */
version?: string;
/** The value must be "filesystem" for all filesystem operations. */
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/models/mappers.ts b/sdk/storage/storage-file-datalake/src/generated/src/models/mappers.ts
index 0eda9de17ff0..8a8e02496b1b 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/models/mappers.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/models/mappers.ts
@@ -6,9 +6,9 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import * as coreHttp from "@azure/core-http";
+import * as coreClient from "@azure/core-client";
-export const FileSystemList: coreHttp.CompositeMapper = {
+export const FileSystemList: coreClient.CompositeMapper = {
serializedName: "FileSystemList",
type: {
name: "Composite",
@@ -32,7 +32,7 @@ export const FileSystemList: coreHttp.CompositeMapper = {
}
};
-export const FileSystem: coreHttp.CompositeMapper = {
+export const FileSystem: coreClient.CompositeMapper = {
serializedName: "FileSystem",
type: {
name: "Composite",
@@ -63,7 +63,7 @@ export const FileSystem: coreHttp.CompositeMapper = {
}
};
-export const StorageError: coreHttp.CompositeMapper = {
+export const StorageError: coreClient.CompositeMapper = {
serializedName: "StorageError",
type: {
name: "Composite",
@@ -88,7 +88,7 @@ export const StorageError: coreHttp.CompositeMapper = {
}
};
-export const StorageErrorError: coreHttp.CompositeMapper = {
+export const StorageErrorError: coreClient.CompositeMapper = {
serializedName: "StorageErrorError",
type: {
name: "Composite",
@@ -112,7 +112,7 @@ export const StorageErrorError: coreHttp.CompositeMapper = {
}
};
-export const PathList: coreHttp.CompositeMapper = {
+export const PathList: coreClient.CompositeMapper = {
serializedName: "PathList",
type: {
name: "Composite",
@@ -136,7 +136,7 @@ export const PathList: coreHttp.CompositeMapper = {
}
};
-export const Path: coreHttp.CompositeMapper = {
+export const Path: coreClient.CompositeMapper = {
serializedName: "Path",
type: {
name: "Composite",
@@ -150,6 +150,7 @@ export const Path: coreHttp.CompositeMapper = {
}
},
isDirectory: {
+ defaultValue: false,
serializedName: "isDirectory",
xmlName: "isDirectory",
type: {
@@ -223,7 +224,7 @@ export const Path: coreHttp.CompositeMapper = {
}
};
-export const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {
+export const ListBlobsHierarchySegmentResponse: coreClient.CompositeMapper = {
serializedName: "ListBlobsHierarchySegmentResponse",
xmlName: "EnumerationResults",
type: {
@@ -295,7 +296,7 @@ export const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {
}
};
-export const BlobHierarchyListSegment: coreHttp.CompositeMapper = {
+export const BlobHierarchyListSegment: coreClient.CompositeMapper = {
serializedName: "BlobHierarchyListSegment",
xmlName: "Blobs",
type: {
@@ -335,7 +336,7 @@ export const BlobHierarchyListSegment: coreHttp.CompositeMapper = {
}
};
-export const BlobPrefix: coreHttp.CompositeMapper = {
+export const BlobPrefix: coreClient.CompositeMapper = {
serializedName: "BlobPrefix",
type: {
name: "Composite",
@@ -353,7 +354,7 @@ export const BlobPrefix: coreHttp.CompositeMapper = {
}
};
-export const BlobItemModel: coreHttp.CompositeMapper = {
+export const BlobItemModel: coreClient.CompositeMapper = {
serializedName: "BlobItemModel",
xmlName: "Blob",
type: {
@@ -417,7 +418,7 @@ export const BlobItemModel: coreHttp.CompositeMapper = {
}
};
-export const BlobPropertiesModel: coreHttp.CompositeMapper = {
+export const BlobPropertiesModel: coreClient.CompositeMapper = {
serializedName: "BlobPropertiesModel",
xmlName: "Properties",
type: {
@@ -633,7 +634,7 @@ export const BlobPropertiesModel: coreHttp.CompositeMapper = {
}
};
-export const SetAccessControlRecursiveResponse: coreHttp.CompositeMapper = {
+export const SetAccessControlRecursiveResponse: coreClient.CompositeMapper = {
serializedName: "SetAccessControlRecursiveResponse",
type: {
name: "Composite",
@@ -678,7 +679,7 @@ export const SetAccessControlRecursiveResponse: coreHttp.CompositeMapper = {
}
};
-export const AclFailedEntry: coreHttp.CompositeMapper = {
+export const AclFailedEntry: coreClient.CompositeMapper = {
serializedName: "AclFailedEntry",
type: {
name: "Composite",
@@ -709,7 +710,7 @@ export const AclFailedEntry: coreHttp.CompositeMapper = {
}
};
-export const ServiceListFileSystemsHeaders: coreHttp.CompositeMapper = {
+export const ServiceListFileSystemsHeaders: coreClient.CompositeMapper = {
serializedName: "Service_listFileSystemsHeaders",
type: {
name: "Composite",
@@ -759,7 +760,7 @@ export const ServiceListFileSystemsHeaders: coreHttp.CompositeMapper = {
}
};
-export const ServiceListFileSystemsExceptionHeaders: coreHttp.CompositeMapper = {
+export const ServiceListFileSystemsExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Service_listFileSystemsExceptionHeaders",
type: {
name: "Composite",
@@ -776,7 +777,7 @@ export const ServiceListFileSystemsExceptionHeaders: coreHttp.CompositeMapper =
}
};
-export const FileSystemCreateHeaders: coreHttp.CompositeMapper = {
+export const FileSystemCreateHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_createHeaders",
type: {
name: "Composite",
@@ -833,7 +834,7 @@ export const FileSystemCreateHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemCreateExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemCreateExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_createExceptionHeaders",
type: {
name: "Composite",
@@ -850,7 +851,7 @@ export const FileSystemCreateExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemSetPropertiesHeaders: coreHttp.CompositeMapper = {
+export const FileSystemSetPropertiesHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_setPropertiesHeaders",
type: {
name: "Composite",
@@ -900,7 +901,7 @@ export const FileSystemSetPropertiesHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemSetPropertiesExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_setPropertiesExceptionHeaders",
type: {
name: "Composite",
@@ -917,7 +918,7 @@ export const FileSystemSetPropertiesExceptionHeaders: coreHttp.CompositeMapper =
}
};
-export const FileSystemGetPropertiesHeaders: coreHttp.CompositeMapper = {
+export const FileSystemGetPropertiesHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_getPropertiesHeaders",
type: {
name: "Composite",
@@ -981,7 +982,7 @@ export const FileSystemGetPropertiesHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemGetPropertiesExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_getPropertiesExceptionHeaders",
type: {
name: "Composite",
@@ -998,7 +999,7 @@ export const FileSystemGetPropertiesExceptionHeaders: coreHttp.CompositeMapper =
}
};
-export const FileSystemDeleteHeaders: coreHttp.CompositeMapper = {
+export const FileSystemDeleteHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_deleteHeaders",
type: {
name: "Composite",
@@ -1034,7 +1035,7 @@ export const FileSystemDeleteHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemDeleteExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemDeleteExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_deleteExceptionHeaders",
type: {
name: "Composite",
@@ -1051,7 +1052,7 @@ export const FileSystemDeleteExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemListPathsHeaders: coreHttp.CompositeMapper = {
+export const FileSystemListPathsHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_listPathsHeaders",
type: {
name: "Composite",
@@ -1115,7 +1116,7 @@ export const FileSystemListPathsHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemListPathsExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemListPathsExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_listPathsExceptionHeaders",
type: {
name: "Composite",
@@ -1132,7 +1133,7 @@ export const FileSystemListPathsExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const FileSystemListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {
+export const FileSystemListBlobHierarchySegmentHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_listBlobHierarchySegmentHeaders",
type: {
name: "Composite",
@@ -1184,7 +1185,7 @@ export const FileSystemListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper
}
};
-export const FileSystemListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {
+export const FileSystemListBlobHierarchySegmentExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "FileSystem_listBlobHierarchySegmentExceptionHeaders",
type: {
name: "Composite",
@@ -1201,7 +1202,7 @@ export const FileSystemListBlobHierarchySegmentExceptionHeaders: coreHttp.Compos
}
};
-export const PathCreateHeaders: coreHttp.CompositeMapper = {
+export const PathCreateHeaders: coreClient.CompositeMapper = {
serializedName: "Path_createHeaders",
type: {
name: "Composite",
@@ -1286,7 +1287,7 @@ export const PathCreateHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathCreateExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathCreateExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_createExceptionHeaders",
type: {
name: "Composite",
@@ -1303,7 +1304,7 @@ export const PathCreateExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathUpdateHeaders: coreHttp.CompositeMapper = {
+export const PathUpdateHeaders: coreClient.CompositeMapper = {
serializedName: "Path_updateHeaders",
type: {
name: "Composite",
@@ -1437,7 +1438,7 @@ export const PathUpdateHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathUpdateExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathUpdateExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_updateExceptionHeaders",
type: {
name: "Composite",
@@ -1454,7 +1455,7 @@ export const PathUpdateExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathLeaseHeaders: coreHttp.CompositeMapper = {
+export const PathLeaseHeaders: coreClient.CompositeMapper = {
serializedName: "Path_leaseHeaders",
type: {
name: "Composite",
@@ -1516,7 +1517,7 @@ export const PathLeaseHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathLeaseExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathLeaseExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_leaseExceptionHeaders",
type: {
name: "Composite",
@@ -1533,7 +1534,7 @@ export const PathLeaseExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathReadHeaders: coreHttp.CompositeMapper = {
+export const PathReadHeaders: coreClient.CompositeMapper = {
serializedName: "Path_readHeaders",
type: {
name: "Composite",
@@ -1695,7 +1696,7 @@ export const PathReadHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathReadExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathReadExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_readExceptionHeaders",
type: {
name: "Composite",
@@ -1712,7 +1713,7 @@ export const PathReadExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathGetPropertiesHeaders: coreHttp.CompositeMapper = {
+export const PathGetPropertiesHeaders: coreClient.CompositeMapper = {
serializedName: "Path_getPropertiesHeaders",
type: {
name: "Composite",
@@ -1895,7 +1896,7 @@ export const PathGetPropertiesHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathGetPropertiesExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_getPropertiesExceptionHeaders",
type: {
name: "Composite",
@@ -1912,7 +1913,7 @@ export const PathGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathDeleteHeaders: coreHttp.CompositeMapper = {
+export const PathDeleteHeaders: coreClient.CompositeMapper = {
serializedName: "Path_deleteHeaders",
type: {
name: "Composite",
@@ -1969,7 +1970,7 @@ export const PathDeleteHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathDeleteExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathDeleteExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_deleteExceptionHeaders",
type: {
name: "Composite",
@@ -1986,7 +1987,7 @@ export const PathDeleteExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetAccessControlHeaders: coreHttp.CompositeMapper = {
+export const PathSetAccessControlHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setAccessControlHeaders",
type: {
name: "Composite",
@@ -2038,7 +2039,7 @@ export const PathSetAccessControlHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetAccessControlExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathSetAccessControlExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setAccessControlExceptionHeaders",
type: {
name: "Composite",
@@ -2069,7 +2070,7 @@ export const PathSetAccessControlExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetAccessControlRecursiveHeaders: coreHttp.CompositeMapper = {
+export const PathSetAccessControlRecursiveHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setAccessControlRecursiveHeaders",
type: {
name: "Composite",
@@ -2114,7 +2115,7 @@ export const PathSetAccessControlRecursiveHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetAccessControlRecursiveExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathSetAccessControlRecursiveExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setAccessControlRecursiveExceptionHeaders",
type: {
name: "Composite",
@@ -2145,7 +2146,7 @@ export const PathSetAccessControlRecursiveExceptionHeaders: coreHttp.CompositeMa
}
};
-export const PathFlushDataHeaders: coreHttp.CompositeMapper = {
+export const PathFlushDataHeaders: coreClient.CompositeMapper = {
serializedName: "Path_flushDataHeaders",
type: {
name: "Composite",
@@ -2230,7 +2231,7 @@ export const PathFlushDataHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathFlushDataExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathFlushDataExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_flushDataExceptionHeaders",
type: {
name: "Composite",
@@ -2261,7 +2262,7 @@ export const PathFlushDataExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathAppendDataHeaders: coreHttp.CompositeMapper = {
+export const PathAppendDataHeaders: coreClient.CompositeMapper = {
serializedName: "Path_appendDataHeaders",
type: {
name: "Composite",
@@ -2341,7 +2342,7 @@ export const PathAppendDataHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathAppendDataExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathAppendDataExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_appendDataExceptionHeaders",
type: {
name: "Composite",
@@ -2372,7 +2373,7 @@ export const PathAppendDataExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetExpiryHeaders: coreHttp.CompositeMapper = {
+export const PathSetExpiryHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setExpiryHeaders",
type: {
name: "Composite",
@@ -2431,7 +2432,7 @@ export const PathSetExpiryHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathSetExpiryExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_setExpiryExceptionHeaders",
type: {
name: "Composite",
@@ -2448,7 +2449,7 @@ export const PathSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathUndeleteHeaders: coreHttp.CompositeMapper = {
+export const PathUndeleteHeaders: coreClient.CompositeMapper = {
serializedName: "Path_undeleteHeaders",
type: {
name: "Composite",
@@ -2493,7 +2494,7 @@ export const PathUndeleteHeaders: coreHttp.CompositeMapper = {
}
};
-export const PathUndeleteExceptionHeaders: coreHttp.CompositeMapper = {
+export const PathUndeleteExceptionHeaders: coreClient.CompositeMapper = {
serializedName: "Path_undeleteExceptionHeaders",
type: {
name: "Composite",
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/models/parameters.ts b/sdk/storage/storage-file-datalake/src/generated/src/models/parameters.ts
index ecba91d21e5d..b50ac8c8da28 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/models/parameters.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/models/parameters.ts
@@ -9,9 +9,8 @@
import {
OperationParameter,
OperationURLParameter,
- OperationQueryParameter,
- QueryCollectionFormat
-} from "@azure/core-http";
+ OperationQueryParameter
+} from "@azure/core-client";
export const accept: OperationParameter = {
parameterPath: "accept",
@@ -284,7 +283,7 @@ export const include: OperationQueryParameter = {
}
}
},
- collectionFormat: QueryCollectionFormat.Csv
+ collectionFormat: "CSV"
};
export const showonly: OperationQueryParameter = {
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystem.ts b/sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystemOperations.ts
similarity index 76%
rename from sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystem.ts
rename to sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystemOperations.ts
index c9985d943cfb..f5b80cda31fc 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystem.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operations/fileSystemOperations.ts
@@ -6,10 +6,11 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import * as coreHttp from "@azure/core-http";
+import { FileSystemOperations } from "../operationsInterfaces";
+import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
-import { StorageClientContext } from "../storageClientContext";
+import { StorageClient } from "../storageClient";
import {
FileSystemCreateOptionalParams,
FileSystemCreateResponse,
@@ -25,15 +26,15 @@ import {
FileSystemListBlobHierarchySegmentResponse
} from "../models";
-/** Class representing a FileSystem. */
-export class FileSystem {
- private readonly client: StorageClientContext;
+/** Class containing FileSystemOperations operations. */
+export class FileSystemOperationsImpl implements FileSystemOperations {
+ private readonly client: StorageClient;
/**
- * Initialize a new instance of the class FileSystem class.
+ * Initialize a new instance of the class FileSystemOperations class.
* @param client Reference to the service client
*/
- constructor(client: StorageClientContext) {
+ constructor(client: StorageClient) {
this.client = client;
}
@@ -45,13 +46,7 @@ export class FileSystem {
create(
options?: FileSystemCreateOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- createOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, createOperationSpec);
}
/**
@@ -63,13 +58,10 @@ export class FileSystem {
setProperties(
options?: FileSystemSetPropertiesOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
setPropertiesOperationSpec
- ) as Promise;
+ );
}
/**
@@ -79,13 +71,10 @@ export class FileSystem {
getProperties(
options?: FileSystemGetPropertiesOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
getPropertiesOperationSpec
- ) as Promise;
+ );
}
/**
@@ -103,13 +92,7 @@ export class FileSystem {
delete(
options?: FileSystemDeleteOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- deleteOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, deleteOperationSpec);
}
/**
@@ -121,14 +104,10 @@ export class FileSystem {
recursive: boolean,
options?: FileSystemListPathsOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- recursive,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { recursive, options },
listPathsOperationSpec
- ) as Promise;
+ );
}
/**
@@ -138,21 +117,18 @@ export class FileSystem {
listBlobHierarchySegment(
options?: FileSystemListBlobHierarchySegmentOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
listBlobHierarchySegmentOperationSpec
- ) as Promise;
+ );
}
}
// Operation Specifications
-const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
+const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
-const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
-const createOperationSpec: coreHttp.OperationSpec = {
+const createOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "PUT",
responses: {
@@ -174,7 +150,7 @@ const createOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const setPropertiesOperationSpec: coreHttp.OperationSpec = {
+const setPropertiesOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "PATCH",
responses: {
@@ -198,7 +174,7 @@ const setPropertiesOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const getPropertiesOperationSpec: coreHttp.OperationSpec = {
+const getPropertiesOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "HEAD",
responses: {
@@ -219,7 +195,7 @@ const getPropertiesOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const deleteOperationSpec: coreHttp.OperationSpec = {
+const deleteOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "DELETE",
responses: {
@@ -242,7 +218,7 @@ const deleteOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const listPathsOperationSpec: coreHttp.OperationSpec = {
+const listPathsOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "GET",
responses: {
@@ -272,7 +248,7 @@ const listPathsOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {
+const listBlobHierarchySegmentOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}",
httpMethod: "GET",
responses: {
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operations/index.ts b/sdk/storage/storage-file-datalake/src/generated/src/operations/index.ts
index aa030cbd60a7..05769142fb0c 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/operations/index.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operations/index.ts
@@ -7,5 +7,5 @@
*/
export * from "./service";
-export * from "./fileSystem";
-export * from "./path";
+export * from "./fileSystemOperations";
+export * from "./pathOperations";
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operations/path.ts b/sdk/storage/storage-file-datalake/src/generated/src/operations/pathOperations.ts
similarity index 82%
rename from sdk/storage/storage-file-datalake/src/generated/src/operations/path.ts
rename to sdk/storage/storage-file-datalake/src/generated/src/operations/pathOperations.ts
index 4f2723f590f5..7dab8f9ca437 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/operations/path.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operations/pathOperations.ts
@@ -6,10 +6,12 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import * as coreHttp from "@azure/core-http";
+import { PathOperations } from "../operationsInterfaces";
+import * as coreClient from "@azure/core-client";
+import * as coreRestPipeline from "@azure/core-rest-pipeline";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
-import { StorageClientContext } from "../storageClientContext";
+import { StorageClient } from "../storageClient";
import {
PathCreateOptionalParams,
PathCreateResponse,
@@ -41,15 +43,15 @@ import {
PathUndeleteResponse
} from "../models";
-/** Class representing a Path. */
-export class Path {
- private readonly client: StorageClientContext;
+/** Class containing PathOperations operations. */
+export class PathOperationsImpl implements PathOperations {
+ private readonly client: StorageClient;
/**
- * Initialize a new instance of the class Path class.
+ * Initialize a new instance of the class PathOperations class.
* @param client Reference to the service client
*/
- constructor(client: StorageClientContext) {
+ constructor(client: StorageClient) {
this.client = client;
}
@@ -62,13 +64,7 @@ export class Path {
* @param options The options parameters.
*/
create(options?: PathCreateOptionalParams): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- createOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, createOperationSpec);
}
/**
@@ -94,19 +90,13 @@ export class Path {
update(
action: PathUpdateAction,
mode: PathSetAccessControlRecursiveMode,
- body: coreHttp.HttpRequestBody,
+ body: coreRestPipeline.RequestBodyType,
options?: PathUpdateOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- action,
- mode,
- body,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { action, mode, body, options },
updateOperationSpec
- ) as Promise;
+ );
}
/**
@@ -129,14 +119,10 @@ export class Path {
xMsLeaseAction: PathLeaseAction,
options?: PathLeaseOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- xMsLeaseAction,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { xMsLeaseAction, options },
leaseOperationSpec
- ) as Promise;
+ );
}
/**
@@ -147,13 +133,7 @@ export class Path {
* @param options The options parameters.
*/
read(options?: PathReadOptionalParams): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- readOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, readOperationSpec);
}
/**
@@ -167,13 +147,10 @@ export class Path {
getProperties(
options?: PathGetPropertiesOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
getPropertiesOperationSpec
- ) as Promise;
+ );
}
/**
@@ -183,13 +160,7 @@ export class Path {
* @param options The options parameters.
*/
delete(options?: PathDeleteOptionalParams): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- deleteOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, deleteOperationSpec);
}
/**
@@ -199,13 +170,10 @@ export class Path {
setAccessControl(
options?: PathSetAccessControlOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
setAccessControlOperationSpec
- ) as Promise;
+ );
}
/**
@@ -219,14 +187,10 @@ export class Path {
mode: PathSetAccessControlRecursiveMode,
options?: PathSetAccessControlRecursiveOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- mode,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { mode, options },
setAccessControlRecursiveOperationSpec
- ) as Promise;
+ );
}
/**
@@ -236,13 +200,10 @@ export class Path {
flushData(
options?: PathFlushDataOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
flushDataOperationSpec
- ) as Promise;
+ );
}
/**
@@ -251,17 +212,13 @@ export class Path {
* @param options The options parameters.
*/
appendData(
- body: coreHttp.HttpRequestBody,
+ body: coreRestPipeline.RequestBodyType,
options?: PathAppendDataOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- body,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { body, options },
appendDataOperationSpec
- ) as Promise;
+ );
}
/**
@@ -273,14 +230,10 @@ export class Path {
expiryOptions: PathExpiryOptions,
options?: PathSetExpiryOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- expiryOptions,
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { expiryOptions, options },
setExpiryOperationSpec
- ) as Promise;
+ );
}
/**
@@ -290,19 +243,13 @@ export class Path {
undelete(
options?: PathUndeleteOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
- return this.client.sendOperationRequest(
- operationArguments,
- undeleteOperationSpec
- ) as Promise;
+ return this.client.sendOperationRequest({ options }, undeleteOperationSpec);
}
}
// Operation Specifications
-const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
-const createOperationSpec: coreHttp.OperationSpec = {
+const createOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PUT",
responses: {
@@ -357,7 +304,7 @@ const createOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const updateOperationSpec: coreHttp.OperationSpec = {
+const updateOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PATCH",
responses: {
@@ -412,7 +359,7 @@ const updateOperationSpec: coreHttp.OperationSpec = {
mediaType: "binary",
serializer
};
-const leaseOperationSpec: coreHttp.OperationSpec = {
+const leaseOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "POST",
responses: {
@@ -448,7 +395,7 @@ const leaseOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const readOperationSpec: coreHttp.OperationSpec = {
+const readOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "GET",
responses: {
@@ -490,7 +437,7 @@ const readOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const getPropertiesOperationSpec: coreHttp.OperationSpec = {
+const getPropertiesOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "HEAD",
responses: {
@@ -516,7 +463,7 @@ const getPropertiesOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const deleteOperationSpec: coreHttp.OperationSpec = {
+const deleteOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "DELETE",
responses: {
@@ -546,7 +493,7 @@ const deleteOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const setAccessControlOperationSpec: coreHttp.OperationSpec = {
+const setAccessControlOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PATCH",
responses: {
@@ -576,7 +523,7 @@ const setAccessControlOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const setAccessControlRecursiveOperationSpec: coreHttp.OperationSpec = {
+const setAccessControlRecursiveOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PATCH",
responses: {
@@ -606,7 +553,7 @@ const setAccessControlRecursiveOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const flushDataOperationSpec: coreHttp.OperationSpec = {
+const flushDataOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PATCH",
responses: {
@@ -651,7 +598,7 @@ const flushDataOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const appendDataOperationSpec: coreHttp.OperationSpec = {
+const appendDataOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PATCH",
responses: {
@@ -690,7 +637,7 @@ const appendDataOperationSpec: coreHttp.OperationSpec = {
mediaType: "binary",
serializer
};
-const setExpiryOperationSpec: coreHttp.OperationSpec = {
+const setExpiryOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PUT",
responses: {
@@ -713,7 +660,7 @@ const setExpiryOperationSpec: coreHttp.OperationSpec = {
],
serializer
};
-const undeleteOperationSpec: coreHttp.OperationSpec = {
+const undeleteOperationSpec: coreClient.OperationSpec = {
path: "/{filesystem}/{path}",
httpMethod: "PUT",
responses: {
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operations/service.ts b/sdk/storage/storage-file-datalake/src/generated/src/operations/service.ts
index d0c26c2bba66..d2038de9910f 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/operations/service.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operations/service.ts
@@ -6,24 +6,25 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import * as coreHttp from "@azure/core-http";
+import { Service } from "../operationsInterfaces";
+import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
-import { StorageClientContext } from "../storageClientContext";
+import { StorageClient } from "../storageClient";
import {
ServiceListFileSystemsOptionalParams,
ServiceListFileSystemsResponse
} from "../models";
-/** Class representing a Service. */
-export class Service {
- private readonly client: StorageClientContext;
+/** Class containing Service operations. */
+export class ServiceImpl implements Service {
+ private readonly client: StorageClient;
/**
* Initialize a new instance of the class Service class.
* @param client Reference to the service client
*/
- constructor(client: StorageClientContext) {
+ constructor(client: StorageClient) {
this.client = client;
}
@@ -34,19 +35,16 @@ export class Service {
listFileSystems(
options?: ServiceListFileSystemsOptionalParams
): Promise {
- const operationArguments: coreHttp.OperationArguments = {
- options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
- };
return this.client.sendOperationRequest(
- operationArguments,
+ { options },
listFileSystemsOperationSpec
- ) as Promise;
+ );
}
}
// Operation Specifications
-const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
-const listFileSystemsOperationSpec: coreHttp.OperationSpec = {
+const listFileSystemsOperationSpec: coreClient.OperationSpec = {
path: "/",
httpMethod: "GET",
responses: {
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/fileSystemOperations.ts b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/fileSystemOperations.ts
new file mode 100644
index 000000000000..de40d0b921ee
--- /dev/null
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/fileSystemOperations.ts
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+import {
+ FileSystemCreateOptionalParams,
+ FileSystemCreateResponse,
+ FileSystemSetPropertiesOptionalParams,
+ FileSystemSetPropertiesResponse,
+ FileSystemGetPropertiesOptionalParams,
+ FileSystemGetPropertiesResponse,
+ FileSystemDeleteOptionalParams,
+ FileSystemDeleteResponse,
+ FileSystemListPathsOptionalParams,
+ FileSystemListPathsResponse,
+ FileSystemListBlobHierarchySegmentOptionalParams,
+ FileSystemListBlobHierarchySegmentResponse
+} from "../models";
+
+/** Interface representing a FileSystemOperations. */
+export interface FileSystemOperations {
+ /**
+ * Create a FileSystem rooted at the specified location. If the FileSystem already exists, the
+ * operation fails. This operation does not support conditional HTTP requests.
+ * @param options The options parameters.
+ */
+ create(
+ options?: FileSystemCreateOptionalParams
+  ): Promise<FileSystemCreateResponse>;
+ /**
+ * Set properties for the FileSystem. This operation supports conditional HTTP requests. For more
+ * information, see [Specifying Conditional Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param options The options parameters.
+ */
+ setProperties(
+ options?: FileSystemSetPropertiesOptionalParams
+  ): Promise<FileSystemSetPropertiesResponse>;
+ /**
+ * All system and user-defined filesystem properties are specified in the response headers.
+ * @param options The options parameters.
+ */
+ getProperties(
+ options?: FileSystemGetPropertiesOptionalParams
+  ): Promise<FileSystemGetPropertiesResponse>;
+ /**
+ * Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same
+ * identifier cannot be created for at least 30 seconds. While the filesystem is being deleted,
+ * attempts to create a filesystem with the same identifier will fail with status code 409 (Conflict),
+ * with the service returning additional error information indicating that the filesystem is being
+ * deleted. All other operations, including operations on any files or directories within the
+ * filesystem, will fail with status code 404 (Not Found) while the filesystem is being deleted. This
+ * operation supports conditional HTTP requests. For more information, see [Specifying Conditional
+ * Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param options The options parameters.
+ */
+ delete(
+ options?: FileSystemDeleteOptionalParams
+  ): Promise<FileSystemDeleteResponse>;
+ /**
+ * List FileSystem paths and their properties.
+ * @param recursive Required
+ * @param options The options parameters.
+ */
+ listPaths(
+ recursive: boolean,
+ options?: FileSystemListPathsOptionalParams
+  ): Promise<FileSystemListPathsResponse>;
+ /**
+ * The List Blobs operation returns a list of the blobs under the specified container
+ * @param options The options parameters.
+ */
+ listBlobHierarchySegment(
+ options?: FileSystemListBlobHierarchySegmentOptionalParams
+  ): Promise<FileSystemListBlobHierarchySegmentResponse>;
+}
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/index.ts b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/index.ts
new file mode 100644
index 000000000000..05769142fb0c
--- /dev/null
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/index.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+export * from "./service";
+export * from "./fileSystemOperations";
+export * from "./pathOperations";
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/pathOperations.ts b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/pathOperations.ts
new file mode 100644
index 000000000000..b5c6cf4dcc43
--- /dev/null
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/pathOperations.ts
@@ -0,0 +1,172 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+import * as coreRestPipeline from "@azure/core-rest-pipeline";
+import {
+ PathCreateOptionalParams,
+ PathCreateResponse,
+ PathUpdateAction,
+ PathSetAccessControlRecursiveMode,
+ PathUpdateOptionalParams,
+ PathUpdateResponse,
+ PathLeaseAction,
+ PathLeaseOptionalParams,
+ PathLeaseResponse,
+ PathReadOptionalParams,
+ PathReadResponse,
+ PathGetPropertiesOptionalParams,
+ PathGetPropertiesResponse,
+ PathDeleteOptionalParams,
+ PathDeleteResponse,
+ PathSetAccessControlOptionalParams,
+ PathSetAccessControlResponse,
+ PathSetAccessControlRecursiveOptionalParams,
+ PathSetAccessControlRecursiveResponse,
+ PathFlushDataOptionalParams,
+ PathFlushDataResponse,
+ PathAppendDataOptionalParams,
+ PathAppendDataResponse,
+ PathExpiryOptions,
+ PathSetExpiryOptionalParams,
+ PathSetExpiryResponse,
+ PathUndeleteOptionalParams,
+ PathUndeleteResponse
+} from "../models";
+
+/** Interface representing a PathOperations. */
+export interface PathOperations {
+ /**
+ * Create or rename a file or directory. By default, the destination is overwritten and if the
+ * destination already exists and has a lease the lease is broken. This operation supports conditional
+ * HTTP requests. For more information, see [Specifying Conditional Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * To fail if the destination already exists, use a conditional request with If-None-Match: "*".
+ * @param options The options parameters.
+ */
+  create(options?: PathCreateOptionalParams): Promise<PathCreateResponse>;
+ /**
+ * Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file, sets
+ * properties for a file or directory, or sets access control for a file or directory. Data can only be
+ * appended to a file. Concurrent writes to the same file using multiple clients are not supported.
+ * This operation supports conditional HTTP requests. For more information, see [Specifying Conditional
+ * Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param action The action must be "append" to upload data to be appended to a file, "flush" to flush
+ * previously uploaded data to a file, "setProperties" to set the properties of a file or directory,
+ * "setAccessControl" to set the owner, group, permissions, or access control list for a file or
+ * directory, or "setAccessControlRecursive" to set the access control list for a directory
+ * recursively. Note that Hierarchical Namespace must be enabled for the account in order to use access
+ * control. Also note that the Access Control List (ACL) includes permissions for the owner, owning
+ * group, and others, so the x-ms-permissions and x-ms-acl request headers are mutually exclusive.
+ * @param mode Mode "set" sets POSIX access control rights on files and directories, "modify" modifies
+ * one or more POSIX access control rights that pre-exist on files and directories, "remove" removes
+ * one or more POSIX access control rights that were present earlier on files and directories
+ * @param body Initial data
+ * @param options The options parameters.
+ */
+ update(
+ action: PathUpdateAction,
+ mode: PathSetAccessControlRecursiveMode,
+ body: coreRestPipeline.RequestBodyType,
+ options?: PathUpdateOptionalParams
+  ): Promise<PathUpdateResponse>;
+ /**
+ * Create and manage a lease to restrict write and delete access to the path. This operation supports
+ * conditional HTTP requests. For more information, see [Specifying Conditional Headers for Blob
+ * Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param xMsLeaseAction There are five lease actions: "acquire", "break", "change", "renew", and
+ * "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration" to
+ * acquire a new lease. Use "break" to break an existing lease. When a lease is broken, the lease break
+ * period is allowed to elapse, during which time no lease operation except break and release can be
+ * performed on the file. When a lease is successfully broken, the response indicates the interval in
+ * seconds until a new lease can be acquired. Use "change" and specify the current lease ID in
+ * "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to change the lease ID of an active
+ * lease. Use "renew" and specify the "x-ms-lease-id" to renew an existing lease. Use "release" and
+ * specify the "x-ms-lease-id" to release a lease.
+ * @param options The options parameters.
+ */
+ lease(
+ xMsLeaseAction: PathLeaseAction,
+ options?: PathLeaseOptionalParams
+  ): Promise<PathLeaseResponse>;
+ /**
+ * Read the contents of a file. For read operations, range requests are supported. This operation
+ * supports conditional HTTP requests. For more information, see [Specifying Conditional Headers for
+ * Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param options The options parameters.
+ */
+  read(options?: PathReadOptionalParams): Promise<PathReadResponse>;
+ /**
+ * Get Properties returns all system and user defined properties for a path. Get Status returns all
+ * system defined properties for a path. Get Access Control List returns the access control list for a
+ * path. This operation supports conditional HTTP requests. For more information, see [Specifying
+ * Conditional Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param options The options parameters.
+ */
+ getProperties(
+ options?: PathGetPropertiesOptionalParams
+  ): Promise<PathGetPropertiesResponse>;
+ /**
+ * Delete the file or directory. This operation supports conditional HTTP requests. For more
+ * information, see [Specifying Conditional Headers for Blob Service
+ * Operations](https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations).
+ * @param options The options parameters.
+ */
+  delete(options?: PathDeleteOptionalParams): Promise<PathDeleteResponse>;
+ /**
+ * Set the owner, group, permissions, or access control list for a path.
+ * @param options The options parameters.
+ */
+ setAccessControl(
+ options?: PathSetAccessControlOptionalParams
+  ): Promise<PathSetAccessControlResponse>;
+ /**
+ * Set the access control list for a path and sub-paths.
+ * @param mode Mode "set" sets POSIX access control rights on files and directories, "modify" modifies
+ * one or more POSIX access control rights that pre-exist on files and directories, "remove" removes
+ * one or more POSIX access control rights that were present earlier on files and directories
+ * @param options The options parameters.
+ */
+ setAccessControlRecursive(
+ mode: PathSetAccessControlRecursiveMode,
+ options?: PathSetAccessControlRecursiveOptionalParams
+  ): Promise<PathSetAccessControlRecursiveResponse>;
+ /**
+ * Set the owner, group, permissions, or access control list for a path.
+ * @param options The options parameters.
+ */
+ flushData(
+ options?: PathFlushDataOptionalParams
+  ): Promise<PathFlushDataResponse>;
+ /**
+ * Append data to the file.
+ * @param body Initial data
+ * @param options The options parameters.
+ */
+ appendData(
+ body: coreRestPipeline.RequestBodyType,
+ options?: PathAppendDataOptionalParams
+  ): Promise<PathAppendDataResponse>;
+ /**
+ * Sets the time a blob will expire and be deleted.
+ * @param expiryOptions Required. Indicates mode of the expiry time
+ * @param options The options parameters.
+ */
+ setExpiry(
+ expiryOptions: PathExpiryOptions,
+ options?: PathSetExpiryOptionalParams
+  ): Promise<PathSetExpiryResponse>;
+ /**
+ * Undelete a path that was previously soft deleted
+ * @param options The options parameters.
+ */
+  undelete(options?: PathUndeleteOptionalParams): Promise<PathUndeleteResponse>;
+}
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/service.ts b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/service.ts
new file mode 100644
index 000000000000..7f21937e467f
--- /dev/null
+++ b/sdk/storage/storage-file-datalake/src/generated/src/operationsInterfaces/service.ts
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+import {
+ ServiceListFileSystemsOptionalParams,
+ ServiceListFileSystemsResponse
+} from "../models";
+
+/** Interface representing a Service. */
+export interface Service {
+ /**
+ * List filesystems and their properties in given account.
+ * @param options The options parameters.
+ */
+ listFileSystems(
+ options?: ServiceListFileSystemsOptionalParams
+  ): Promise<ServiceListFileSystemsResponse>;
+}
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/storageClient.ts b/sdk/storage/storage-file-datalake/src/generated/src/storageClient.ts
index 5fb8a1e729b1..8b4871968d82 100644
--- a/sdk/storage/storage-file-datalake/src/generated/src/storageClient.ts
+++ b/sdk/storage/storage-file-datalake/src/generated/src/storageClient.ts
@@ -6,11 +6,25 @@
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-import { Service, FileSystem, Path } from "./operations";
-import { StorageClientContext } from "./storageClientContext";
+import * as coreHttpCompat from "@azure/core-http-compat";
+import {
+ ServiceImpl,
+ FileSystemOperationsImpl,
+ PathOperationsImpl
+} from "./operations";
+import {
+ Service,
+ FileSystemOperations,
+ PathOperations
+} from "./operationsInterfaces";
import { StorageClientOptionalParams } from "./models";
-export class StorageClient extends StorageClientContext {
+export class StorageClient extends coreHttpCompat.ExtendedServiceClient {
+ url: string;
+ version: string;
+ resource: string;
+ xMsLeaseDuration?: number;
+
/**
* Initializes a new instance of the StorageClient class.
* @param url The URL of the service account, container, or blob that is the target of the desired
@@ -18,13 +32,45 @@ export class StorageClient extends StorageClientContext {
* @param options The parameter options
*/
constructor(url: string, options?: StorageClientOptionalParams) {
- super(url, options);
- this.service = new Service(this);
- this.fileSystem = new FileSystem(this);
- this.path = new Path(this);
+ if (url === undefined) {
+ throw new Error("'url' cannot be null");
+ }
+
+ // Initializing default values for options
+ if (!options) {
+ options = {};
+ }
+ const defaults: StorageClientOptionalParams = {
+ requestContentType: "application/json; charset=utf-8"
+ };
+
+    const packageDetails = `azsdk-js-azure-storage-datalake/12.20.0`;
+ const userAgentPrefix =
+ options.userAgentOptions && options.userAgentOptions.userAgentPrefix
+ ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`
+ : `${packageDetails}`;
+
+ const optionsWithDefaults = {
+ ...defaults,
+ ...options,
+ userAgentOptions: {
+ userAgentPrefix
+ },
+ endpoint: options.endpoint ?? options.baseUri ?? "{url}"
+ };
+ super(optionsWithDefaults);
+ // Parameter assignments
+ this.url = url;
+
+ // Assigning values to Constant parameters
+ this.version = options.version || "2021-12-02";
+ this.resource = options.resource || "filesystem";
+ this.service = new ServiceImpl(this);
+ this.fileSystemOperations = new FileSystemOperationsImpl(this);
+ this.pathOperations = new PathOperationsImpl(this);
}
service: Service;
- fileSystem: FileSystem;
- path: Path;
+ fileSystemOperations: FileSystemOperations;
+ pathOperations: PathOperations;
}
diff --git a/sdk/storage/storage-file-datalake/src/generated/src/storageClientContext.ts b/sdk/storage/storage-file-datalake/src/generated/src/storageClientContext.ts
deleted file mode 100644
index dde67165cfac..000000000000
--- a/sdk/storage/storage-file-datalake/src/generated/src/storageClientContext.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) Microsoft Corporation.
- * Licensed under the MIT License.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-import * as coreHttp from "@azure/core-http";
-import { StorageClientOptionalParams } from "./models";
-
-const packageName = "azure-storage-datalake";
-const packageVersion = "12.12.0";
-
-export class StorageClientContext extends coreHttp.ServiceClient {
- url: string;
- version: string;
- resource: string;
- xMsLeaseDuration?: number;
-
- /**
- * Initializes a new instance of the StorageClientContext class.
- * @param url The URL of the service account, container, or blob that is the target of the desired
- * operation.
- * @param options The parameter options
- */
- constructor(url: string, options?: StorageClientOptionalParams) {
- if (url === undefined) {
- throw new Error("'url' cannot be null");
- }
-
- // Initializing default values for options
- if (!options) {
- options = {};
- }
-
- if (!options.userAgent) {
- const defaultUserAgent = coreHttp.getDefaultUserAgentValue();
- options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;
- }
-
- super(undefined, options);
-
- this.requestContentType = "application/json; charset=utf-8";
-
- this.baseUri = options.endpoint || "{url}";
-
- // Parameter assignments
- this.url = url;
-
- // Assigning values to Constant parameters
- this.version = options.version || "2021-12-02";
- this.resource = options.resource || "filesystem";
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/index.browser.ts b/sdk/storage/storage-file-datalake/src/index.browser.ts
index 79470ab79c66..66d37716da14 100644
--- a/sdk/storage/storage-file-datalake/src/index.browser.ts
+++ b/sdk/storage/storage-file-datalake/src/index.browser.ts
@@ -5,16 +5,24 @@ export * from "./DataLakeServiceClient";
export * from "./DataLakeFileSystemClient";
export * from "./clients";
export * from "./DataLakeLeaseClient";
-export * from "./StorageBrowserPolicyFactory";
-export * from "./credentials/AnonymousCredential";
-export * from "./credentials/Credential";
-export * from "./Pipeline";
-export * from "./policies/AnonymousCredentialPolicy";
-export * from "./policies/CredentialPolicy";
-export * from "./StorageRetryPolicyFactory";
export * from "./models";
export * from "./utils/DataLakeAclChangeFailedError";
-export { CommonOptions } from "./StorageClient";
+export {
+ BlobServiceProperties as DataLakeServiceProperties,
+ ServiceGetPropertiesResponse as DataLakeServiceGetPropertiesResponse,
+ Pipeline,
+ newPipeline,
+ CommonOptions,
+ StoragePipelineOptions,
+ AnonymousCredential,
+ Credential,
+ AnonymousCredentialPolicy,
+ CredentialPolicy,
+ StorageBrowserPolicy,
+ StorageBrowserPolicyFactory,
+ StorageRetryPolicy,
+ StorageRetryPolicyFactory,
+} from "@azure/storage-blob";
export { ToBlobEndpointHostMappings, ToDfsEndpointHostMappings } from "./utils/constants";
-export { RestError } from "@azure/core-http";
+export { RestError } from "@azure/core-rest-pipeline";
export { logger } from "./log";
diff --git a/sdk/storage/storage-file-datalake/src/index.ts b/sdk/storage/storage-file-datalake/src/index.ts
index 6a0524b8e811..37c8184372fb 100644
--- a/sdk/storage/storage-file-datalake/src/index.ts
+++ b/sdk/storage/storage-file-datalake/src/index.ts
@@ -11,15 +11,6 @@ export * from "./sas/AccountSASSignatureValues";
export * from "./sas/DataLakeSASPermissions";
export * from "./sas/DataLakeSASSignatureValues";
export * from "./sas/FileSystemSASPermissions";
-export * from "./StorageBrowserPolicyFactory";
-export * from "./credentials/AnonymousCredential";
-export * from "./credentials/Credential";
-export * from "./credentials/StorageSharedKeyCredential";
-export * from "./Pipeline";
-export * from "./policies/AnonymousCredentialPolicy";
-export * from "./policies/CredentialPolicy";
-export * from "./StorageRetryPolicyFactory";
-export * from "./policies/StorageSharedKeyCredentialPolicy";
export * from "./sas/SASQueryParameters";
export * from "./models";
export * from "./utils/DataLakeAclChangeFailedError";
@@ -27,11 +18,24 @@ export * from "./utils/DataLakeAclChangeFailedError";
export {
BlobServiceProperties as DataLakeServiceProperties,
ServiceGetPropertiesResponse as DataLakeServiceGetPropertiesResponse,
+ Pipeline,
+ newPipeline,
+ CommonOptions,
+ StoragePipelineOptions,
+ AnonymousCredential,
+ Credential,
+ StorageSharedKeyCredential,
+ AnonymousCredentialPolicy,
+ CredentialPolicy,
+ StorageBrowserPolicy,
+ StorageBrowserPolicyFactory,
+ StorageRetryPolicy,
+ StorageRetryPolicyFactory,
+ StorageSharedKeyCredentialPolicy,
} from "@azure/storage-blob";
-export { CommonOptions } from "./StorageClient";
export { SasIPRange } from "./sas/SasIPRange";
export { ToBlobEndpointHostMappings, ToDfsEndpointHostMappings } from "./utils/constants";
-export { RestError } from "@azure/core-http";
+export { RestError } from "@azure/core-rest-pipeline";
export { logger } from "./log";
export * from "./sas/DirectorySASPermissions";
diff --git a/sdk/storage/storage-file-datalake/src/models.ts b/sdk/storage/storage-file-datalake/src/models.ts
index 46fecb4342bf..622b14bc79d9 100644
--- a/sdk/storage/storage-file-datalake/src/models.ts
+++ b/sdk/storage/storage-file-datalake/src/models.ts
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { AbortSignalLike } from "@azure/abort-controller";
-import { HttpResponse, TransferProgressEvent } from "@azure/core-http";
+import { TransferProgressEvent } from "@azure/core-rest-pipeline";
import {
LeaseAccessConditions,
@@ -11,6 +11,8 @@ import {
ServiceRenameContainerOptions,
ContainerRenameResponse,
ContainerUndeleteResponse,
+ CommonOptions,
+ WithResponse,
} from "@azure/storage-blob";
import { DataLakePathClient } from "./clients";
export type ModifiedAccessConditions = Omit<ModifiedAccessConditionsModel, "ifTags">;
@@ -41,10 +43,14 @@ import {
FileSystemListPathsHeaders,
LeaseAction,
ListBlobsHierarchySegmentResponse,
- PathCreateResponse,
- PathDeleteResponse,
+ PathAppendDataHeaders,
+ PathCreateHeaders,
+ PathDeleteHeaders,
+ PathFlushDataHeaders,
PathGetPropertiesHeaders as PathGetPropertiesHeadersModel,
PathList as PathListModel,
+ PathSetAccessControlHeaders,
+ PathSetExpiryHeaders,
PathUndeleteHeaders,
} from "./generated/src/models";
import { DataLakeSASPermissions } from "./sas/DataLakeSASPermissions";
@@ -52,7 +58,6 @@ import { DirectorySASPermissions } from "./sas/DirectorySASPermissions";
import { FileSystemSASPermissions } from "./sas/FileSystemSASPermissions";
import { SasIPRange } from "./sas/SasIPRange";
import { SASProtocol } from "./sas/SASQueryParameters";
-import { CommonOptions } from "./StorageClient";
export {
LeaseAccessConditions,
@@ -72,33 +77,38 @@ export {
EncryptionAlgorithmType,
FileSystemListPathsHeaders,
FileSystemListBlobHierarchySegmentHeaders,
- FileSystemListPathsResponse as ListPathsSegmentResponse,
ListBlobsHierarchySegmentResponse,
Path as PathModel,
PathList as PathListModel,
PathCreateHeaders,
PathDeleteHeaders,
- PathDeleteResponse,
PathGetPropertiesHeaders as PathGetPropertiesHeadersModel,
PathSetAccessControlHeaders,
- PathSetAccessControlResponse,
- PathSetAccessControlResponse as PathSetPermissionsResponse,
PathResourceType as PathResourceTypeModel,
PathUndeleteHeaders,
PathUpdateHeaders,
PathAppendDataHeaders,
PathFlushDataHeaders,
- PathAppendDataResponse as FileAppendResponse,
- PathFlushDataResponse as FileFlushResponse,
- PathFlushDataResponse as FileUploadResponse,
PathGetPropertiesAction as PathGetPropertiesActionModel,
PathRenameMode as PathRenameModeModel,
PathExpiryOptions as FileExpiryMode,
- PathSetExpiryResponse as FileSetExpiryResponse,
PathSetExpiryHeaders as FileSetExpiryHeaders,
} from "./generated/src/models";
-export { PathCreateResponse };
+export type PathCreateResponse = WithResponse<PathCreateHeaders, PathCreateHeaders>;
+export type PathDeleteResponse = WithResponse<PathDeleteHeaders, PathDeleteHeaders>;
+export type FileFlushResponse = WithResponse<PathFlushDataHeaders, PathFlushDataHeaders>;
+export type FileUploadResponse = WithResponse<PathFlushDataHeaders, PathFlushDataHeaders>;
+export type PathSetAccessControlResponse = WithResponse<
+ PathSetAccessControlHeaders,
+ PathSetAccessControlHeaders
+>;
+export type PathSetPermissionsResponse = WithResponse<
+ PathSetAccessControlHeaders,
+ PathSetAccessControlHeaders
+>;
+export type FileAppendResponse = WithResponse<PathAppendDataHeaders, PathAppendDataHeaders>;
+export type FileSetExpiryResponse = WithResponse<PathSetExpiryHeaders, PathSetExpiryHeaders>;
/**
* Common options of the {@link FileSystemGenerateSasUrlOptions}, {@link DirectoryGenerateSasUrlOptions}
@@ -195,14 +205,11 @@ export interface UserDelegationKey {
value: string;
}
-export type ServiceGetUserDelegationKeyResponse = UserDelegationKey &
- ServiceGetUserDelegationKeyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: ServiceGetUserDelegationKeyHeaders;
- bodyAsText: string;
- parsedBody: UserDelegationKeyModel;
- };
- };
+export type ServiceGetUserDelegationKeyResponse = WithResponse<
+ UserDelegationKey & ServiceGetUserDelegationKeyHeaders,
+ ServiceGetUserDelegationKeyHeaders,
+ UserDelegationKeyModel
+>;
export interface ServiceListFileSystemsOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -257,14 +264,11 @@ export interface ServiceListFileSystemsSegmentHeaders {
version?: string;
}
-export type ServiceListFileSystemsSegmentResponse = ListFileSystemsSegmentResponse &
- ServiceListFileSystemsSegmentHeaders & {
- _response: HttpResponse & {
- parsedHeaders: ServiceListFileSystemsSegmentHeaders;
- bodyAsText: string;
- parsedBody: ListFileSystemsSegmentResponse;
- };
- };
+export type ServiceListFileSystemsSegmentResponse = WithResponse<
+ ListFileSystemsSegmentResponse & ServiceListFileSystemsSegmentHeaders,
+ ServiceListFileSystemsSegmentHeaders,
+ ListFileSystemsSegmentResponse
+>;
/**
* Options to configure {@link DataLakeServiceClient.generateAccountSasUrl} operation.
@@ -336,11 +340,10 @@ export interface FileSystemCreateHeaders {
date?: Date;
}
-export type FileSystemCreateResponse = FileSystemCreateHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemCreateHeaders;
- };
-};
+export type FileSystemCreateResponse = WithResponse<
+ FileSystemCreateHeaders,
+ FileSystemCreateHeaders
+>;
export interface FileSystemDeleteOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -354,11 +357,10 @@ export interface FileSystemDeleteHeaders {
date?: Date;
}
-export type FileSystemDeleteResponse = FileSystemDeleteHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemDeleteHeaders;
- };
-};
+export type FileSystemDeleteResponse = WithResponse<
+ FileSystemDeleteHeaders,
+ FileSystemDeleteHeaders
+>;
export interface FileSystemGetPropertiesOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -385,11 +387,10 @@ export interface FileSystemGetPropertiesHeaders {
defaultEncryptionScope?: string;
}
-export type FileSystemGetPropertiesResponse = FileSystemGetPropertiesHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemGetPropertiesHeaders;
- };
-};
+export type FileSystemGetPropertiesResponse = WithResponse<
+ FileSystemGetPropertiesHeaders,
+ FileSystemGetPropertiesHeaders
+>;
export interface FileSystemSetMetadataOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -405,11 +406,10 @@ export interface FileSystemSetMetadataHeaders {
date?: Date;
}
-export type FileSystemSetMetadataResponse = FileSystemSetMetadataHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemSetMetadataHeaders;
- };
-};
+export type FileSystemSetMetadataResponse = WithResponse<
+ FileSystemSetMetadataHeaders,
+ FileSystemSetMetadataHeaders
+>;
export interface FileSystemGetAccessPolicyOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -443,15 +443,13 @@ export interface SignedIdentifier {
accessPolicy: T;
}
-export type FileSystemGetAccessPolicyResponse = {
- signedIdentifiers: SignedIdentifier[];
-} & FileSystemGetAccessPolicyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemGetAccessPolicyHeaders;
- bodyAsText: string;
- parsedBody: SignedIdentifier[];
- };
- };
+export type FileSystemGetAccessPolicyResponse = WithResponse<
+ {
+ signedIdentifiers: SignedIdentifier[];
+ } & FileSystemGetAccessPolicyHeaders,
+ FileSystemGetAccessPolicyHeaders,
+ SignedIdentifier[]
+>;
export interface FileSystemSetAccessPolicyOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -467,11 +465,10 @@ export interface FileSystemSetAccessPolicyHeaders {
date?: Date;
}
-export type FileSystemSetAccessPolicyResponse = FileSystemSetAccessPolicyHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemSetAccessPolicyHeaders;
- };
-};
+export type FileSystemSetAccessPolicyResponse = WithResponse<
+ FileSystemSetAccessPolicyHeaders,
+ FileSystemSetAccessPolicyHeaders
+>;
export interface ListPathsOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -511,14 +508,11 @@ export interface PathList {
pathItems?: Path[];
}
-export type FileSystemListPathsResponse = PathList &
- FileSystemListPathsHeaders & {
- _response: HttpResponse & {
- parsedHeaders: FileSystemListPathsHeaders;
- bodyAsText: string;
- parsedBody: PathListModel;
- };
- };
+export type FileSystemListPathsResponse = WithResponse<
+ PathList & FileSystemListPathsHeaders,
+ FileSystemListPathsHeaders,
+ PathListModel
+>;
export interface ListDeletedPathsOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -541,32 +535,22 @@ export interface DeletedPathList {
pathItems?: DeletedPath[];
}
-export type FileSystemListDeletedPathsResponse = DeletedPathList &
- FileSystemListBlobHierarchySegmentHeaders &
- ListBlobsHierarchySegmentResponse & {
- _response: HttpResponse & {
- /** The response body as text (string format) */
- bodyAsText: string;
-
- /** The response body as parsed JSON or XML */
- parsedBody: ListBlobsHierarchySegmentResponse;
- /** The parsed HTTP response headers. */
- parsedHeaders: FileSystemListBlobHierarchySegmentHeaders;
- };
-
- continuation?: string;
- };
+export type FileSystemListDeletedPathsResponse = WithResponse<
+ DeletedPathList &
+ FileSystemListBlobHierarchySegmentHeaders &
+ ListBlobsHierarchySegmentResponse & { continuation?: string },
+ FileSystemListBlobHierarchySegmentHeaders,
+ ListBlobsHierarchySegmentResponse
+>;
export interface FileSystemUndeletePathOption extends CommonOptions {
abortSignal?: AbortSignalLike;
}
-export type FileSystemUndeletePathResponse = PathUndeleteHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathUndeleteHeaders;
- };
- pathClient: DataLakePathClient;
-};
+export type FileSystemUndeletePathResponse = WithResponse<
+ PathUndeleteHeaders & { pathClient: DataLakePathClient },
+ PathUndeleteHeaders
+>;
/**
* Option interface for Data Lake file system exists operations
@@ -790,12 +774,10 @@ export interface PathAccessControl {
acl: PathAccessControlItem[];
}
-export type PathGetAccessControlResponse = PathAccessControl &
- PathGetAccessControlHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathGetPropertiesHeadersModel;
- };
- };
+export type PathGetAccessControlResponse = WithResponse<
+ PathAccessControl & PathGetAccessControlHeaders,
+ PathGetPropertiesHeadersModel
+>;
export interface PathSetAccessControlOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -979,11 +961,10 @@ export interface PathGetPropertiesHeaders {
expiresOn?: Date;
}
-export type PathGetPropertiesResponse = PathGetPropertiesHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathGetPropertiesHeaders;
- };
-};
+export type PathGetPropertiesResponse = WithResponse<
+ PathGetPropertiesHeaders,
+ PathGetPropertiesHeaders
+>;
export interface PathSetHttpHeadersOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -1008,11 +989,10 @@ export interface PathSetHttpHeadersHeaders {
date?: Date;
}
-export type PathSetHttpHeadersResponse = PathSetHttpHeadersHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathSetHttpHeadersHeaders;
- };
-};
+export type PathSetHttpHeadersResponse = WithResponse<
+ PathSetHttpHeadersHeaders,
+ PathSetHttpHeadersHeaders
+>;
export interface PathSetMetadataOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -1034,11 +1014,7 @@ export interface PathSetMetadataHeaders {
encryptionKeySha256?: string;
}
-export type PathSetMetadataResponse = PathSetMetadataHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathSetMetadataHeaders;
- };
-};
+export type PathSetMetadataResponse = WithResponse;
export interface PathMoveOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
@@ -1055,11 +1031,7 @@ export interface PathRemoveHeaders {
contentLength?: number;
}
-export type PathMoveResponse = PathRemoveHeaders & {
- _response: HttpResponse & {
- parsedHeaders: PathRemoveHeaders;
- };
-};
+export type PathMoveResponse = WithResponse;
/**
* Option interface for Data Lake directory/file exists operations
@@ -1201,13 +1173,13 @@ export interface FileReadHeaders {
contentCrc64?: Uint8Array;
}
-export type FileReadResponse = FileReadHeaders & {
- contentAsBlob?: Promise;
- readableStreamBody?: NodeJS.ReadableStream;
- _response: HttpResponse & {
- parsedHeaders: FileReadHeaders;
- };
-};
+export type FileReadResponse = WithResponse<
+ FileReadHeaders & {
+ contentAsBlob?: Promise;
+ readableStreamBody?: NodeJS.ReadableStream;
+ },
+ FileReadHeaders
+>;
export interface FileAppendOptions extends CommonOptions {
abortSignal?: AbortSignalLike;
diff --git a/sdk/storage/storage-file-datalake/src/policies/AnonymousCredentialPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/AnonymousCredentialPolicy.ts
deleted file mode 100644
index c000a4fd70fd..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/AnonymousCredentialPolicy.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyOptions } from "@azure/core-http";
-
-import { CredentialPolicy } from "./CredentialPolicy";
-
-/**
- * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources
- * or for use with Shared Access Signatures (SAS).
- */
-export class AnonymousCredentialPolicy extends CredentialPolicy {
- /**
- * Creates an instance of AnonymousCredentialPolicy.
- * @param nextPolicy -
- * @param options -
- */
-
- // The base class has a protected constructor. Adding a public one to enable constructing of this class.
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {
- super(nextPolicy, options);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/policies/CredentialPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/CredentialPolicy.ts
deleted file mode 100644
index a0866f5705f3..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/CredentialPolicy.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { BaseRequestPolicy, HttpOperationResponse, WebResource } from "@azure/core-http";
-
-/**
- * Credential policy used to sign HTTP(S) requests before sending. This is an
- * abstract class.
- */
-export abstract class CredentialPolicy extends BaseRequestPolicy {
- /**
- * Sends out request.
- *
- * @param request -
- */
- public sendRequest(request: WebResource): Promise {
- return this._nextPolicy.sendRequest(this.signRequest(request));
- }
-
- /**
- * Child classes must implement this method with request signing. This method
- * will be executed in {@link sendRequest}.
- *
- * @param request -
- */
- protected signRequest(request: WebResource): WebResource {
- // Child classes must override this method with request signing. This method
- // will be executed in sendRequest().
- return request;
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/policies/StorageBrowserPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/StorageBrowserPolicy.ts
deleted file mode 100644
index b62c2619def8..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/StorageBrowserPolicy.ts
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-import {
- BaseRequestPolicy,
- HttpOperationResponse,
- isNode,
- RequestPolicy,
- RequestPolicyOptions,
- WebResource,
-} from "@azure/core-http";
-
-import { HeaderConstants, UrlConstants } from "../utils/constants";
-import { setURLParameter } from "../utils/utils.common";
-
-/**
- * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:
- *
- * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.
- * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL
- * thus avoid the browser cache.
- *
- * 2. Remove cookie header for security
- *
- * 3. Remove content-length header to avoid browsers warning
- */
-export class StorageBrowserPolicy extends BaseRequestPolicy {
- /**
- * Creates an instance of StorageBrowserPolicy.
- * @param nextPolicy -
- * @param options -
- */
-
- // The base class has a protected constructor. Adding a public one to enable constructing of this class.
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {
- super(nextPolicy, options);
- }
-
- /**
- * Sends out request.
- *
- * @param request -
- */
- public async sendRequest(request: WebResource): Promise {
- if (isNode) {
- return this._nextPolicy.sendRequest(request);
- }
-
- if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") {
- request.url = setURLParameter(
- request.url,
- UrlConstants.Parameters.FORCE_BROWSER_NO_CACHE,
- new Date().getTime().toString()
- );
- }
-
- request.headers.remove(HeaderConstants.COOKIE);
-
- // According to XHR standards, content-length should be fully controlled by browsers
- request.headers.remove(HeaderConstants.CONTENT_LENGTH);
-
- // DFS flush file API requires content-length=0, workaround to force browsers add content-length header
- if (request.method === "PATCH" && request.body === undefined) {
- request.body = "";
- }
-
- return this._nextPolicy.sendRequest(request);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/policies/StorageRetryPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/StorageRetryPolicy.ts
deleted file mode 100644
index 95b7e4b5bbb4..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/StorageRetryPolicy.ts
+++ /dev/null
@@ -1,288 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-import { AbortError } from "@azure/abort-controller";
-import {
- AbortSignalLike,
- BaseRequestPolicy,
- HttpOperationResponse,
- RequestPolicy,
- RequestPolicyFactory,
- RequestPolicyOptions,
- RestError,
- WebResource,
-} from "@azure/core-http";
-
-import { StorageRetryOptions } from "../StorageRetryPolicyFactory";
-import { UrlConstants } from "../utils/constants";
-import { delay, setURLHost, setURLParameter } from "../utils/utils.common";
-import { logger } from "../log";
-
-/**
- * A factory method used to generated a RetryPolicy factory.
- *
- * @param retryOptions -
- */
-export function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {
- return {
- create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {
- return new StorageRetryPolicy(nextPolicy, options, retryOptions);
- },
- };
-}
-
-/**
- * RetryPolicy types.
- */
-export enum StorageRetryPolicyType {
- /**
- * Exponential retry. Retry time delay grows exponentially.
- */
- EXPONENTIAL,
- /**
- * Linear retry. Retry time delay grows linearly.
- */
- FIXED,
-}
-
-// Default values of StorageRetryOptions
-const DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {
- maxRetryDelayInMs: 120 * 1000,
- maxTries: 4,
- retryDelayInMs: 4 * 1000,
- retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
- secondaryHost: "",
- tryTimeoutInMs: undefined, // Use server side default timeout strategy
-};
-
-const RETRY_ABORT_ERROR = new AbortError("The operation was aborted.");
-
-/**
- * Retry policy with exponential retry and linear retry implemented.
- */
-export class StorageRetryPolicy extends BaseRequestPolicy {
- /**
- * RetryOptions.
- */
- private readonly retryOptions: StorageRetryOptions;
-
- /**
- * Creates an instance of RetryPolicy.
- *
- * @param nextPolicy -
- * @param options -
- * @param retryOptions -
- */
- constructor(
- nextPolicy: RequestPolicy,
- options: RequestPolicyOptions,
- retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS
- ) {
- super(nextPolicy, options);
-
- // Initialize retry options
- this.retryOptions = {
- retryPolicyType: retryOptions.retryPolicyType
- ? retryOptions.retryPolicyType
- : DEFAULT_RETRY_OPTIONS.retryPolicyType,
-
- maxTries:
- retryOptions.maxTries && retryOptions.maxTries >= 1
- ? Math.floor(retryOptions.maxTries)
- : DEFAULT_RETRY_OPTIONS.maxTries,
-
- tryTimeoutInMs:
- retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0
- ? retryOptions.tryTimeoutInMs
- : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,
-
- retryDelayInMs:
- retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0
- ? Math.min(
- retryOptions.retryDelayInMs,
- retryOptions.maxRetryDelayInMs
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!
- )
- : DEFAULT_RETRY_OPTIONS.retryDelayInMs,
-
- maxRetryDelayInMs:
- retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,
-
- secondaryHost: retryOptions.secondaryHost
- ? retryOptions.secondaryHost
- : DEFAULT_RETRY_OPTIONS.secondaryHost,
- };
- }
-
- /**
- * Sends request.
- *
- * @param request -
- */
- public async sendRequest(request: WebResource): Promise {
- return this.attemptSendRequest(request, false, 1);
- }
-
- /**
- * Decide and perform next retry. Won't mutate request parameter.
- *
- * @param request -
- * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then
- * the resource was not found. This may be due to replication delay. So, in this
- * case, we'll never try the secondary again for this operation.
- * @param attempt - How many retries has been attempted to performed, starting from 1, which includes
- * the attempt will be performed by this method call.
- */
- protected async attemptSendRequest(
- request: WebResource,
- secondaryHas404: boolean,
- attempt: number
- ): Promise {
- const newRequest: WebResource = request.clone();
-
- const isPrimaryRetry =
- secondaryHas404 ||
- !this.retryOptions.secondaryHost ||
- !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") ||
- attempt % 2 === 1;
-
- if (!isPrimaryRetry) {
- newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);
- }
-
- // Set the server-side timeout query parameter "timeout=[seconds]"
- if (this.retryOptions.tryTimeoutInMs) {
- newRequest.url = setURLParameter(
- newRequest.url,
- UrlConstants.Parameters.TIMEOUT,
- Math.floor(this.retryOptions.tryTimeoutInMs! / 1000).toString()
- );
- }
-
- let response: HttpOperationResponse | undefined;
- try {
- logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`);
- response = await this._nextPolicy.sendRequest(newRequest);
- if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {
- return response;
- }
-
- secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);
- } catch (err: any) {
- logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);
- if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {
- throw err;
- }
- }
-
- await this.delay(isPrimaryRetry, attempt, request.abortSignal);
- return this.attemptSendRequest(request, secondaryHas404, ++attempt);
- }
-
- /**
- * Decide whether to retry according to last HTTP response and retry counters.
- *
- * @param isPrimaryRetry -
- * @param attempt -
- * @param response -
- * @param err -
- */
- protected shouldRetry(
- isPrimaryRetry: boolean,
- attempt: number,
- response?: HttpOperationResponse,
- err?: RestError
- ): boolean {
- if (attempt >= this.retryOptions.maxTries!) {
- logger.info(
- `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions
- .maxTries!}, no further try.`
- );
- return false;
- }
-
- // Handle network failures, you may need to customize the list when you implement
- // your own http client
- const retriableErrors = [
- "ETIMEDOUT",
- "ESOCKETTIMEDOUT",
- "ECONNREFUSED",
- "ECONNRESET",
- "ENOENT",
- "ENOTFOUND",
- "TIMEOUT",
- "EPIPE",
- "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js
- ];
- if (err) {
- for (const retriableError of retriableErrors) {
- if (
- err.name.toUpperCase().includes(retriableError) ||
- err.message.toUpperCase().includes(retriableError) ||
- (err.code && err.code.toString().toUpperCase() === retriableError)
- ) {
- logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);
- return true;
- }
- }
- }
-
- // If attempt was against the secondary & it returned a StatusNotFound (404), then
- // the resource was not found. This may be due to replication delay. So, in this
- // case, we'll never try the secondary again for this operation.
- if (response || err) {
- const statusCode = response ? response.status : err ? err.statusCode : 0;
- if (!isPrimaryRetry && statusCode === 404) {
- logger.info(`RetryPolicy: Secondary access with 404, will retry.`);
- return true;
- }
-
- // Server internal error or server timeout
- if (statusCode === 503 || statusCode === 500) {
- logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);
- return true;
- }
- }
-
- if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) {
- logger.info(
- "RetryPolicy: Incomplete XML response likely due to service timeout, will retry."
- );
- return true;
- }
-
- return false;
- }
-
- /**
- * Delay a calculated time between retries.
- *
- * @param isPrimaryRetry -
- * @param attempt -
- * @param abortSignal -
- */
- private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {
- let delayTimeInMs: number = 0;
-
- if (isPrimaryRetry) {
- switch (this.retryOptions.retryPolicyType) {
- case StorageRetryPolicyType.EXPONENTIAL:
- delayTimeInMs = Math.min(
- (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,
- this.retryOptions.maxRetryDelayInMs!
- );
- break;
- case StorageRetryPolicyType.FIXED:
- delayTimeInMs = this.retryOptions.retryDelayInMs!;
- break;
- }
- } else {
- delayTimeInMs = Math.random() * 1000;
- }
-
- logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);
- return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/policies/StorageSharedKeyCredentialPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/StorageSharedKeyCredentialPolicy.ts
deleted file mode 100644
index 0d05104680c8..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/StorageSharedKeyCredentialPolicy.ts
+++ /dev/null
@@ -1,182 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-import { RequestPolicy, RequestPolicyOptions, WebResource } from "@azure/core-http";
-
-import { StorageSharedKeyCredential } from "../credentials/StorageSharedKeyCredential";
-import { HeaderConstants } from "../utils/constants";
-import { getURLPath, getURLQueries } from "../utils/utils.common";
-import { CredentialPolicy } from "./CredentialPolicy";
-
-/**
- * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.
- */
-export class StorageSharedKeyCredentialPolicy extends CredentialPolicy {
- /**
- * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy
- */
- private readonly factory: StorageSharedKeyCredential;
-
- /**
- * Creates an instance of StorageSharedKeyCredentialPolicy.
- * @param nextPolicy -
- * @param options -
- * @param factory -
- */
- constructor(
- nextPolicy: RequestPolicy,
- options: RequestPolicyOptions,
- factory: StorageSharedKeyCredential
- ) {
- super(nextPolicy, options);
- this.factory = factory;
- }
-
- /**
- * Signs request.
- *
- * @param request -
- */
- protected signRequest(request: WebResource): WebResource {
- request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());
-
- if (
- request.body &&
- (typeof request.body === "string" || (request.body as Buffer) !== undefined) &&
- request.body.length > 0
- ) {
- request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
- }
-
- const stringToSign: string =
- [
- request.method.toUpperCase(),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),
- this.getHeaderValueToSign(request, HeaderConstants.DATE),
- this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),
- this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),
- this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),
- this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),
- this.getHeaderValueToSign(request, HeaderConstants.RANGE),
- ].join("\n") +
- "\n" +
- this.getCanonicalizedHeadersString(request) +
- this.getCanonicalizedResourceString(request);
-
- const signature: string = this.factory.computeHMACSHA256(stringToSign);
- request.headers.set(
- HeaderConstants.AUTHORIZATION,
- `SharedKey ${this.factory.accountName}:${signature}`
- );
-
- // Workaround for node-fetch which will set content-type for dfs append data operations based on Patch
- if (typeof request.body !== "function" && !request.headers.get(HeaderConstants.CONTENT_TYPE)) {
- request.headers.set(HeaderConstants.CONTENT_TYPE, "");
- }
-
- // console.log(`[URL]:${request.url}`);
- // console.log(`[HEADERS]:${request.headers.toString()}`);
- // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);
- // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
- return request;
- }
-
- /**
- * Retrieve header value according to shared key sign rules.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
- *
- * @param request -
- * @param headerName -
- */
- private getHeaderValueToSign(request: WebResource, headerName: string): string {
- const value = request.headers.get(headerName);
-
- if (!value) {
- return "";
- }
-
- // When using version 2015-02-21 or later, if Content-Length is zero, then
- // set the Content-Length part of the StringToSign to an empty string.
- // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
- if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
- return "";
- }
-
- return value;
- }
-
- /**
- * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:
- * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
- * 2. Convert each HTTP header name to lowercase.
- * 3. Sort the headers lexicographically by header name, in ascending order.
- * Each header may appear only once in the string.
- * 4. Replace any linear whitespace in the header value with a single space.
- * 5. Trim any whitespace around the colon in the header.
- * 6. Finally, append a new-line character to each canonicalized header in the resulting list.
- * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
- *
- * @param request -
- */
- private getCanonicalizedHeadersString(request: WebResource): string {
- let headersArray = request.headers.headersArray().filter((value) => {
- return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);
- });
-
- headersArray.sort((a, b): number => {
- return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
- });
-
- // Remove duplicate headers
- headersArray = headersArray.filter((value, index, array) => {
- if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {
- return false;
- }
- return true;
- });
-
- let canonicalizedHeadersStringToSign: string = "";
- headersArray.forEach((header) => {
- canonicalizedHeadersStringToSign += `${header.name
- .toLowerCase()
- .trimRight()}:${header.value.trimLeft()}\n`;
- });
-
- return canonicalizedHeadersStringToSign;
- }
-
- /**
- * Retrieves the webResource canonicalized resource string.
- *
- * @param request -
- */
- private getCanonicalizedResourceString(request: WebResource): string {
- const path = getURLPath(request.url) || "/";
-
- let canonicalizedResourceString: string = "";
- canonicalizedResourceString += `/${this.factory.accountName}${path}`;
-
- const queries = getURLQueries(request.url);
- const lowercaseQueries: { [key: string]: string } = {};
- if (queries) {
- const queryKeys: string[] = [];
- for (const key in queries) {
- if (Object.prototype.hasOwnProperty.call(queries, key)) {
- const lowercaseKey = key.toLowerCase();
- lowercaseQueries[lowercaseKey] = queries[key];
- queryKeys.push(lowercaseKey);
- }
- }
-
- queryKeys.sort();
- for (const key of queryKeys) {
- canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;
- }
- }
-
- return canonicalizedResourceString;
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/policies/TelemetryPolicy.ts b/sdk/storage/storage-file-datalake/src/policies/TelemetryPolicy.ts
deleted file mode 100644
index e913c6977a1d..000000000000
--- a/sdk/storage/storage-file-datalake/src/policies/TelemetryPolicy.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import {
- BaseRequestPolicy,
- HttpHeaders,
- HttpOperationResponse,
- isNode,
- RequestPolicy,
- RequestPolicyOptions,
- WebResource,
-} from "@azure/core-http";
-
-import { HeaderConstants } from "../utils/constants";
-
-/**
- * TelemetryPolicy is a policy used to tag user-agent header for every requests.
- */
-export class TelemetryPolicy extends BaseRequestPolicy {
- /**
- * Telemetry string.
- */
- public readonly telemetry: string;
-
- /**
- * Creates an instance of TelemetryPolicy.
- * @param nextPolicy -
- * @param options -
- * @param telemetry -
- */
- constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {
- super(nextPolicy, options);
- this.telemetry = telemetry;
- }
-
- /**
- * Sends out request.
- *
- * @param request -
- */
- public async sendRequest(request: WebResource): Promise {
- if (isNode) {
- if (!request.headers) {
- request.headers = new HttpHeaders();
- }
- if (!request.headers.get(HeaderConstants.USER_AGENT)) {
- request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);
- }
- }
-
- return this._nextPolicy.sendRequest(request);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/src/transforms.ts b/sdk/storage/storage-file-datalake/src/transforms.ts
index 5a92a330593a..fd7cc966730a 100644
--- a/sdk/storage/storage-file-datalake/src/transforms.ts
+++ b/sdk/storage/storage-file-datalake/src/transforms.ts
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { URLBuilder } from "@azure/core-http";
+
import { PagedAsyncIterableIterator, PageSettings } from "@azure/core-paging";
import {
ContainerItem,
@@ -8,13 +8,12 @@ import {
PublicAccessType as ContainerPublicAccessType,
} from "@azure/storage-blob";
-import { AclFailedEntry, CpkInfo, PathGetPropertiesResponse } from "./generated/src/models";
+import { AclFailedEntry, CpkInfo } from "./generated/src/models";
import {
AccessControlChangeError,
FileSystemItem,
Metadata,
PathAccessControlItem,
- PathGetAccessControlResponse,
PathPermissions,
PublicAccessType,
RemovePathAccessControlItem,
@@ -40,22 +39,7 @@ import { base64encode } from "./utils/utils.common";
* @param url -
*/
export function toBlobEndpointUrl(url: string): string {
- const urlParsed = URLBuilder.parse(url);
-
- let host = urlParsed.getHost();
- if (host === undefined) {
- throw RangeError(`toBlobEndpointUrl() parameter url ${url} doesn't include valid host.`);
- }
-
- for (const mapping of ToBlobEndpointHostMappings) {
- if (host.includes(mapping[0])) {
- host = host.replace(mapping[0], mapping[1]);
- break;
- }
- }
-
- urlParsed.setHost(host);
- return urlParsed.toString();
+ return mapHostUrl(url, ToBlobEndpointHostMappings, "toBlobEndpointUrl");
}
/**
@@ -73,22 +57,37 @@ export function toBlobEndpointUrl(url: string): string {
* @param url -
*/
export function toDfsEndpointUrl(url: string): string {
- const urlParsed = URLBuilder.parse(url);
+ return mapHostUrl(url, ToDfsEndpointHostMappings, "toDfsEndpointUrl");
+}
- let host = urlParsed.getHost();
+function mapHostUrl(url: string, hostMappings: string[][], callerMethodName: string): string {
+ let urlParsed: URL;
+ try {
+ urlParsed = new URL(url);
+ } catch (e) {
+ // invalid urls are returned unmodified
+ return url;
+ }
+
+ let host = urlParsed.hostname;
if (host === undefined) {
- throw RangeError(`toDfsEndpointUrl() parameter url ${url} doesn't include valid host.`);
+ throw RangeError(`${callerMethodName}() parameter url ${url} doesn't include valid host.`);
}
- for (const mapping of ToDfsEndpointHostMappings) {
+ for (const mapping of hostMappings) {
if (host.includes(mapping[0])) {
host = host.replace(mapping[0], mapping[1]);
break;
}
}
-
- urlParsed.setHost(host);
- return urlParsed.toString();
+ urlParsed.hostname = host;
+ const result = urlParsed.toString();
+ // don't add a trailing slash if one wasn't already present
+ if (!url.endsWith("/") && result.endsWith("/")) {
+ return result.slice(0, -1);
+ } else {
+ return result;
+ }
}
function toFileSystemAsyncIterableIterator(
@@ -202,17 +201,6 @@ export function toProperties(metadata?: Metadata): string | undefined {
return properties.join(",");
}
-export function toPathGetAccessControlResponse(
- response: PathGetPropertiesResponse
-): PathGetAccessControlResponse {
- return {
- ...response,
- _response: response._response,
- permissions: toPermissions(response.permissions),
- acl: toAcl(response.acl),
- };
-}
-
export function toRolePermissions(permissionsString: string): RolePermissions {
const error = new RangeError(
`toRolePermissions() Invalid role permissions string ${permissionsString}`
diff --git a/sdk/storage/storage-file-datalake/src/utils/DataLakeAclChangeFailedError.ts b/sdk/storage/storage-file-datalake/src/utils/DataLakeAclChangeFailedError.ts
index 111f3b092751..c6c40760b533 100644
--- a/sdk/storage/storage-file-datalake/src/utils/DataLakeAclChangeFailedError.ts
+++ b/sdk/storage/storage-file-datalake/src/utils/DataLakeAclChangeFailedError.ts
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { RestError } from "@azure/core-http";
+import { RestError } from "@azure/core-rest-pipeline";
/**
* An error thrown when an operation is interrupted and can be continued later on.
diff --git a/sdk/storage/storage-file-datalake/src/utils/PathClientInternal.ts b/sdk/storage/storage-file-datalake/src/utils/PathClientInternal.ts
index 7cd73a4de79a..8692569abb32 100644
--- a/sdk/storage/storage-file-datalake/src/utils/PathClientInternal.ts
+++ b/sdk/storage/storage-file-datalake/src/utils/PathClientInternal.ts
@@ -1,8 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { DataLakePathClient } from "../clients";
-import { Path } from "../generated/src/operations";
-import { Pipeline } from "../Pipeline";
+import { PathOperationsImpl as Path } from "../generated/src/operations";
+import { Pipeline } from "@azure/storage-blob";
/**
* A PathClientInternal represents a URL to the Azure Storage path (directory or file) to
diff --git a/sdk/storage/storage-file-datalake/src/utils/cache.ts b/sdk/storage/storage-file-datalake/src/utils/cache.ts
deleted file mode 100644
index aa51ce19e946..000000000000
--- a/sdk/storage/storage-file-datalake/src/utils/cache.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { DefaultHttpClient } from "@azure/core-http";
-import { IHttpClient } from "../Pipeline";
-
-const _defaultHttpClient = new DefaultHttpClient();
-
-export function getCachedDefaultHttpClient(): IHttpClient {
- return _defaultHttpClient;
-}
diff --git a/sdk/storage/storage-file-datalake/src/utils/tracing.ts b/sdk/storage/storage-file-datalake/src/utils/tracing.ts
index 6c49071b8036..bf3b2dabc312 100644
--- a/sdk/storage/storage-file-datalake/src/utils/tracing.ts
+++ b/sdk/storage/storage-file-datalake/src/utils/tracing.ts
@@ -1,7 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { OperationOptions, RequestOptionsBase } from "@azure/core-http";
import { createSpanFunction } from "@azure/core-tracing";
/**
@@ -12,20 +11,3 @@ export const createSpan = createSpanFunction({
packagePrefix: "Azure.Storage.DataLake",
namespace: "Microsoft.Storage",
});
-
-/**
- * @internal
- *
- * Adapt the tracing options from OperationOptions to what they need to be for
- * RequestOptionsBase (when we update to later OpenTelemetry versions this is now
- * two separate fields, not just one).
- */
-export function convertTracingToRequestOptionsBase(
- options?: OperationOptions
-): Pick {
- return {
- // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.
- spanOptions: (options?.tracingOptions as any)?.spanOptions,
- tracingContext: options?.tracingOptions?.tracingContext,
- };
-}
diff --git a/sdk/storage/storage-file-datalake/src/utils/utils.common.ts b/sdk/storage/storage-file-datalake/src/utils/utils.common.ts
index b2c9990c264a..a3b72647bf3d 100644
--- a/sdk/storage/storage-file-datalake/src/utils/utils.common.ts
+++ b/sdk/storage/storage-file-datalake/src/utils/utils.common.ts
@@ -1,8 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { AbortSignalLike } from "@azure/abort-controller";
-import { HttpHeaders, isNode, URLBuilder } from "@azure/core-http";
-import { ContainerEncryptionScope } from "@azure/storage-blob";
+import { createHttpHeaders, HttpHeaders } from "@azure/core-rest-pipeline";
+import { isNode } from "@azure/core-util";
+import { ContainerEncryptionScope, WithResponse } from "@azure/storage-blob";
import { CpkInfo, FileSystemEncryptionScope } from "../models";
import {
@@ -66,13 +66,13 @@ import {
* @param url -
*/
export function escapeURLPath(url: string): string {
- const urlParsed = URLBuilder.parse(url);
+ const urlParsed = new URL(url);
- let path = urlParsed.getPath();
+ let path = urlParsed.pathname;
path = path || "/";
path = escape(path);
- urlParsed.setPath(path);
+ urlParsed.pathname = path;
return urlParsed.toString();
}
@@ -192,7 +192,7 @@ export function extractConnectionStringParts(connectionString: string): Connecti
} else {
// SAS connection string
- const accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
+ let accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
const accountName = getAccountNameFromUrl(blobEndpoint);
if (!blobEndpoint) {
throw new Error("Invalid BlobEndpoint in the provided SAS Connection String");
@@ -200,6 +200,11 @@ export function extractConnectionStringParts(connectionString: string): Connecti
throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String");
}
+ // remove test SAS
+ if (accountSas === "fakeSasToken") {
+ accountSas = "";
+ }
+
return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas };
}
}
@@ -226,11 +231,11 @@ function escape(text: string): string {
* @returns An updated URL string
*/
export function appendToURLPath(url: string, name: string): string {
- const urlParsed = URLBuilder.parse(url);
+ const urlParsed = new URL(url);
- let path = urlParsed.getPath();
+ let path = urlParsed.pathname;
path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name;
- urlParsed.setPath(path);
+ urlParsed.pathname = path;
return urlParsed.toString();
}
@@ -243,16 +248,16 @@ export function appendToURLPath(url: string, name: string): string {
* @returns An updated URL string.
*/
export function appendToURLQuery(url: string, queryParts: string): string {
- const urlParsed = URLBuilder.parse(url);
+ const urlParsed = new URL(url);
- let query = urlParsed.getQuery();
+ let query = urlParsed.search;
if (query) {
query += "&" + queryParts;
} else {
query = queryParts;
}
- urlParsed.setQuery(query);
+ urlParsed.search = query;
return urlParsed.toString();
}
@@ -266,8 +271,28 @@ export function appendToURLQuery(url: string, queryParts: string): string {
* @returns An updated URL string
*/
export function setURLParameter(url: string, name: string, value?: string): string {
- const urlParsed = URLBuilder.parse(url);
- urlParsed.setQueryParameter(name, value);
+ const urlParsed = new URL(url);
+ const encodedName = encodeURIComponent(name);
+ const encodedValue = value ? encodeURIComponent(value) : undefined;
+ // mutating searchParams will change the encoding, so we have to do this ourselves
+ const searchString = urlParsed.search === "" ? "?" : urlParsed.search;
+
+ const searchPieces: string[] = [];
+
+ for (const pair of searchString.slice(1).split("&")) {
+ if (pair) {
+ const [key] = pair.split("=", 2);
+ if (key !== encodedName) {
+ searchPieces.push(pair);
+ }
+ }
+ }
+ if (encodedValue) {
+ searchPieces.push(`${encodedName}=${encodedValue}`);
+ }
+
+ urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : "";
+
return urlParsed.toString();
}
@@ -278,8 +303,8 @@ export function setURLParameter(url: string, name: string, value?: string): stri
* @param name -
*/
export function getURLParameter(url: string, name: string): string | string[] | undefined {
- const urlParsed = URLBuilder.parse(url);
- return urlParsed.getQueryParameterValue(name);
+ const urlParsed = new URL(url);
+ return urlParsed.searchParams.get(name) ?? undefined;
}
/**
@@ -290,8 +315,8 @@ export function getURLParameter(url: string, name: string): string | string[] |
* @returns An updated URL string
*/
export function setURLHost(url: string, host: string): string {
- const urlParsed = URLBuilder.parse(url);
- urlParsed.setHost(host);
+ const urlParsed = new URL(url);
+ urlParsed.hostname = host;
return urlParsed.toString();
}
@@ -301,8 +326,12 @@ export function setURLHost(url: string, host: string): string {
* @param url - Source URL string
*/
export function getURLPath(url: string): string | undefined {
- const urlParsed = URLBuilder.parse(url);
- return urlParsed.getPath();
+ try {
+ const urlParsed = new URL(url);
+ return urlParsed.pathname;
+ } catch (e) {
+ return undefined;
+ }
}
/**
@@ -311,9 +340,9 @@ export function getURLPath(url: string): string | undefined {
* @param url -
* @param path -
*/
-export function setURLPath(url: string, path?: string): string {
- const urlParsed = URLBuilder.parse(url);
- urlParsed.setPath(path);
+export function setURLPath(url: string, path: string): string {
+ const urlParsed = new URL(url);
+ urlParsed.pathname = path;
return urlParsed.toString();
}
@@ -323,8 +352,12 @@ export function setURLPath(url: string, path?: string): string {
* @param url - Source URL string
*/
export function getURLScheme(url: string): string | undefined {
- const urlParsed = URLBuilder.parse(url);
- return urlParsed.getScheme();
+ try {
+ const urlParsed = new URL(url);
+ return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol;
+ } catch (e) {
+ return undefined;
+ }
}
/**
@@ -333,13 +366,13 @@ export function getURLScheme(url: string): string | undefined {
* @param url - Source URL string
*/
export function getURLPathAndQuery(url: string): string | undefined {
- const urlParsed = URLBuilder.parse(url);
- const pathString = urlParsed.getPath();
+ const urlParsed = new URL(url);
+ const pathString = urlParsed.pathname;
if (!pathString) {
throw new RangeError("Invalid url without valid path.");
}
- let queryString = urlParsed.getQuery() || "";
+ let queryString = urlParsed.search || "";
queryString = queryString.trim();
if (queryString !== "") {
queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?'
@@ -354,13 +387,13 @@ export function getURLPathAndQuery(url: string): string | undefined {
* @param url -
*/
export function getURLQueries(url: string): { [key: string]: string } {
- let queryString = URLBuilder.parse(url).getQuery();
+ let queryString = new URL(url).search;
if (!queryString) {
return {};
}
queryString = queryString.trim();
- queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString;
+ queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString;
let querySubStrings: string[] = queryString.split("&");
querySubStrings = querySubStrings.filter((value: string) => {
@@ -382,16 +415,6 @@ export function getURLQueries(url: string): { [key: string]: string } {
return queries;
}
-/**
- * Get URL query string.
- *
- * @param url -
- */
-export function getURLQueryString(url: string): string | undefined {
- const urlParsed = URLBuilder.parse(url);
- return urlParsed.getQuery();
-}
-
/**
* Set URL query string.
*
@@ -399,8 +422,8 @@ export function getURLQueryString(url: string): string | undefined {
* @param queryString -
*/
export function setURLQueries(url: string, queryString: string): string {
- const urlParsed = URLBuilder.parse(url);
- urlParsed.setQuery(queryString);
+ const urlParsed = new URL(url);
+ urlParsed.search = queryString;
return urlParsed.toString();
}
@@ -462,43 +485,6 @@ export function generateBlockID(blockIDPrefix: string, blockIndex: number): stri
return base64encode(res);
}
-/**
- * Delay specified time interval.
- *
- * @param timeInMs -
- * @param aborter -
- * @param abortError -
- */
-export async function delay(
- timeInMs: number,
- aborter?: AbortSignalLike,
- abortError?: Error
-): Promise<void> {
-  return new Promise<void>((resolve, reject) => {
- /* eslint-disable-next-line prefer-const*/
- let timeout: any;
-
- const abortHandler = () => {
- if (timeout !== undefined) {
- clearTimeout(timeout);
- }
- reject(abortError);
- };
-
- const resolveHandler = () => {
- if (aborter !== undefined) {
- aborter.removeEventListener("abort", abortHandler);
- }
- resolve();
- };
-
- timeout = setTimeout(resolveHandler, timeInMs);
- if (aborter !== undefined) {
- aborter.addEventListener("abort", abortHandler);
- }
- });
-}
-
export function sanitizeURL(url: string): string {
let safeURL: string = url;
if (getURLParameter(safeURL, UrlConstants.Parameters.SIGNATURE)) {
@@ -509,14 +495,14 @@ export function sanitizeURL(url: string): string {
}
export function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {
- const headers: HttpHeaders = new HttpHeaders();
- for (const header of originalHeader.headersArray()) {
- if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {
- headers.set(header.name, "*****");
- } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {
- headers.set(header.name, sanitizeURL(header.value));
+ const headers: HttpHeaders = createHttpHeaders();
+ for (const [name, value] of originalHeader) {
+ if (name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {
+ headers.set(name, "*****");
+ } else if (name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {
+ headers.set(name, sanitizeURL(value));
} else {
- headers.set(header.name, header.value);
+ headers.set(name, value);
}
}
@@ -533,40 +519,34 @@ export function iEqual(str1: string, str2: string): boolean {
}
/**
- * Extracts account name from the blobEndpointUrl
- * @param blobEndpointUrl - blobEndpointUrl to extract the account name from
- * @returns account name
+ * Extracts account name from the url
+ * @param url - url to extract the account name from
+ * @returns with the account name
*/
-export function getAccountNameFromUrl(blobEndpointUrl: string): string {
- const parsedUrl: URLBuilder = URLBuilder.parse(blobEndpointUrl);
+export function getAccountNameFromUrl(url: string): string {
+ const parsedUrl = new URL(url);
let accountName;
try {
- if (parsedUrl.getHost()!.split(".")[1] === "blob") {
+ if (parsedUrl.hostname.split(".")[1] === "blob") {
// `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;
- accountName = parsedUrl.getHost()!.split(".")[0];
+ accountName = parsedUrl.hostname.split(".")[0];
} else if (isIpEndpointStyle(parsedUrl)) {
// IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/
// Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/
// .getPath() -> /devstoreaccount1/
- accountName = parsedUrl.getPath()!.split("/")[1];
+ accountName = parsedUrl.pathname.split("/")[1];
} else {
// Custom domain case: "https://customdomain.com/containername/blob".
accountName = "";
}
-
return accountName;
} catch (error: any) {
throw new Error("Unable to extract accountName with provided information.");
}
}
-export function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {
- if (parsedUrl.getHost() === undefined) {
- return false;
- }
-
- const host =
- parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort());
+export function isIpEndpointStyle(parsedUrl: URL): boolean {
+ const host = parsedUrl.host;
// Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.
// Case 2: localhost(:port), use broad regex to match port part.
@@ -576,7 +556,7 @@ export function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {
/^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(
host
) ||
- (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()!))
+ (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))
);
}
@@ -631,3 +611,19 @@ export function EscapePath(pathName: string): string {
}
return split.join("/");
}
+
+/**
+ * A typesafe helper for ensuring that a given response object has
+ * the original _response attached.
+ * @param response - A response object from calling a client operation
+ * @returns The same object, but with known _response property
+ */
+export function assertResponse<T extends object, Headers = undefined, Body = undefined>(
+  response: T
+): WithResponse<T, Headers, Body> {
+  if (`_response` in response) {
+    return response as WithResponse<T, Headers, Body>;
+ }
+
+ throw new TypeError(`Unexpected response object ${response}`);
+}
diff --git a/sdk/storage/storage-file-datalake/swagger/README.md b/sdk/storage/storage-file-datalake/swagger/README.md
index 649c635aed2e..1d270c3dc83c 100644
--- a/sdk/storage/storage-file-datalake/swagger/README.md
+++ b/sdk/storage/storage-file-datalake/swagger/README.md
@@ -17,9 +17,10 @@ model-date-time-as-string: true
optional-response-headers: true
v3: true
disable-async-iterators: true
+core-http-compat-mode: true
add-credentials: false
use-extension:
- "@autorest/typescript": "6.0.0-dev.20210223.1"
+ "@autorest/typescript": "6.0.0-rc.8"
package-version: 12.12.0
```
diff --git a/sdk/storage/storage-file-datalake/test/aborter.spec.ts b/sdk/storage/storage-file-datalake/test/aborter.spec.ts
index 0301c314402b..9cabcadebaf9 100644
--- a/sdk/storage/storage-file-datalake/test/aborter.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/aborter.spec.ts
@@ -32,7 +32,6 @@ describe("Aborter", () => {
assert.fail();
} catch (err: any) {
assert.equal(err.name, "AbortError");
- assert.equal(err.message, "The operation was aborted.", "Unexpected error caught: " + err);
}
});
@@ -49,7 +48,6 @@ describe("Aborter", () => {
assert.fail();
} catch (err: any) {
assert.equal(err.name, "AbortError");
- assert.equal(err.message, "The operation was aborted.", "Unexpected error caught: " + err);
}
});
@@ -74,7 +72,6 @@ describe("Aborter", () => {
assert.fail();
} catch (err: any) {
assert.equal(err.name, "AbortError");
- assert.equal(err.message, "The operation was aborted.", "Unexpected error caught: " + err);
}
});
});
diff --git a/sdk/storage/storage-file-datalake/test/browser/highlevel.browser.spec.ts b/sdk/storage/storage-file-datalake/test/browser/highlevel.browser.spec.ts
index c0526f2bf50e..d1b7b47bddf1 100644
--- a/sdk/storage/storage-file-datalake/test/browser/highlevel.browser.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/browser/highlevel.browser.spec.ts
@@ -104,7 +104,7 @@ describe("Highlevel browser only", () => {
},
});
} catch (err: any) {
- assert.equal(err.message, "The operation was aborted.", "Unexpected error caught: " + err);
+ assert.equal(err.name, "AbortError");
}
assert.ok(eventTriggered);
});
@@ -129,7 +129,7 @@ describe("Highlevel browser only", () => {
singleUploadThreshold: 8 * MB,
});
} catch (err: any) {
- assert.equal(err.message, "The operation was aborted.", "Unexpected error caught: " + err);
+ assert.equal(err.name, "AbortError");
}
assert.ok(eventTriggered);
});
diff --git a/sdk/storage/storage-file-datalake/test/node/filesystemclient.spec.ts b/sdk/storage/storage-file-datalake/test/node/filesystemclient.spec.ts
index 9580819ec154..5cbba6a15906 100644
--- a/sdk/storage/storage-file-datalake/test/node/filesystemclient.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/node/filesystemclient.spec.ts
@@ -1,17 +1,12 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { TokenCredential } from "@azure/core-http";
+import { TokenCredential } from "@azure/core-auth";
import { record, Recorder } from "@azure-tools/test-recorder";
import { assert } from "chai";
import { Context } from "mocha";
-import {
- DataLakeFileSystemClient,
- FileSystemSASPermissions,
- newPipeline,
- StorageSharedKeyCredential,
-} from "../../src";
+import { DataLakeFileSystemClient, FileSystemSASPermissions, newPipeline } from "../../src";
import { PublicAccessType } from "../../src/models";
import { getDataLakeServiceClient, recorderEnvSetup } from "../utils";
import { assertClientUsesTokenCredential } from "../utils/assert";
@@ -81,8 +76,7 @@ describe("DataLakeFileSystemClient Node.js only", () => {
});
it("can be created with a url and a credential", async () => {
- const factories = (fileSystemClient as any).pipeline.factories;
- const credential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const credential = fileSystemClient.credential;
const newClient = new DataLakeFileSystemClient(fileSystemClient.url, credential);
const result = await newClient.getProperties();
@@ -99,8 +93,7 @@ describe("DataLakeFileSystemClient Node.js only", () => {
});
it("can be created with a url and a credential and an option bag", async () => {
- const factories = (fileSystemClient as any).pipeline.factories;
- const credential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const credential = fileSystemClient.credential;
const newClient = new DataLakeFileSystemClient(fileSystemClient.url, credential, {
retryOptions: {
maxTries: 5,
@@ -133,8 +126,7 @@ describe("DataLakeFileSystemClient Node.js only", () => {
});
it("can be created with a url and a pipeline", async () => {
- const factories = (fileSystemClient as any).pipeline.factories;
- const credential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const credential = fileSystemClient.credential;
const pipeline = newPipeline(credential);
const newClient = new DataLakeFileSystemClient(fileSystemClient.url, pipeline);
diff --git a/sdk/storage/storage-file-datalake/test/node/sas.spec.ts b/sdk/storage/storage-file-datalake/test/node/sas.spec.ts
index 8c97afaddc54..a3ca01789ef9 100644
--- a/sdk/storage/storage-file-datalake/test/node/sas.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/node/sas.spec.ts
@@ -2,7 +2,7 @@
// Licensed under the MIT license.
import { UserDelegationKey } from "@azure/storage-blob";
-import { record, Recorder } from "@azure-tools/test-recorder";
+import { record, Recorder, delay } from "@azure-tools/test-recorder";
import { assert } from "chai";
import { Context } from "mocha";
@@ -28,7 +28,6 @@ import {
import { DataLakeFileClient } from "../../src/";
import { DirectorySASPermissions } from "../../src/sas/DirectorySASPermissions";
import { SASProtocol } from "../../src/sas/SASQueryParameters";
-import { delay } from "../../src/utils/utils.common";
import {
getDataLakeServiceClient,
getDataLakeServiceClientWithDefaultCredential,
@@ -56,9 +55,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const sas = generateAccountSASQueryParameters(
{
@@ -91,9 +88,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const sas = generateAccountSASQueryParameters(
{
@@ -126,9 +121,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const sas = generateAccountSASQueryParameters(
{
@@ -160,9 +153,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const sas = generateAccountSASQueryParameters(
{
@@ -194,9 +185,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const sas = generateAccountSASQueryParameters(
{
@@ -234,9 +223,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -277,9 +264,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -316,9 +301,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -373,9 +356,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -434,9 +415,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -476,9 +455,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem-with-dash");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -536,9 +513,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -594,9 +569,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
const tmr = recorder.newDate("tmr");
tmr.setDate(tmr.getDate() + 10);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1];
+ const sharedKeyCredential = serviceClient.credential;
const fileSystemName = recorder.getUniqueName("filesystem");
const fileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
@@ -668,9 +641,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
tmr.setDate(tmr.getDate() + 5);
const userDelegationKey = await serviceClientWithToken!.getUserDelegationKey(now, tmr);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const sharedKeyCredential = serviceClient.credential as StorageSharedKeyCredential;
const accountName = sharedKeyCredential.accountName;
const fileSystemName = recorder.getUniqueName("filesystem");
@@ -724,9 +695,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
tmr.setDate(tmr.getDate() + 5);
const userDelegationKey = await serviceClientWithToken!.getUserDelegationKey(now, tmr);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const sharedKeyCredential = serviceClient.credential as StorageSharedKeyCredential;
const accountName = sharedKeyCredential.accountName;
const fileSystemName = recorder.getUniqueName("filesystem");
@@ -776,9 +745,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
tmr.setDate(tmr.getDate() + 5);
const userDelegationKey = await serviceClientWithToken!.getUserDelegationKey(now, tmr);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const sharedKeyCredential = serviceClient.credential as StorageSharedKeyCredential;
const accountName = sharedKeyCredential.accountName;
const fileSystemName = recorder.getUniqueName("filesystem");
@@ -850,9 +817,7 @@ describe("Shared Access Signature (SAS) generation Node.js only", () => {
tmr.setDate(tmr.getDate() + 5);
const userDelegationKey = await serviceClientWithToken!.getUserDelegationKey(now, tmr);
- // By default, credential is always the last element of pipeline factories
- const factories = (serviceClient as any).pipeline.factories;
- const sharedKeyCredential = factories[factories.length - 1] as StorageSharedKeyCredential;
+ const sharedKeyCredential = serviceClient.credential as StorageSharedKeyCredential;
const accountName = sharedKeyCredential.accountName;
const fileSystemName = recorder.getUniqueName("filesystem");
@@ -1603,7 +1568,6 @@ describe("SAS generation Node.js only for delegation SAS", () => {
},
];
await rootDirectoryClient.setAccessControl(acl);
-
const fileSystemSAS = generateDataLakeSASQueryParameters(
{
fileSystemName: fileSystemClient.name,
@@ -1637,6 +1601,7 @@ describe("SAS generation Node.js only for delegation SAS", () => {
const newFileClientWithSAS2 = new DataLakeFileClient(`${newFileClient.url}?${fileSystemSAS2}`);
try {
await newFileClientWithSAS2.createIfNotExists();
+      assert.fail("Expected createIfNotExists to fail");
} catch (err: any) {
assert.deepStrictEqual(err.details.errorCode, "AuthorizationPermissionMismatch");
}
diff --git a/sdk/storage/storage-file-datalake/test/pathclient.spec.ts b/sdk/storage/storage-file-datalake/test/pathclient.spec.ts
index aab2e75f3cfd..3e3eefa6572e 100644
--- a/sdk/storage/storage-file-datalake/test/pathclient.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/pathclient.spec.ts
@@ -2,9 +2,9 @@
// Licensed under the MIT license.
import { AbortController } from "@azure/abort-controller";
-import { isNode, delay } from "@azure/core-http";
+import { isNode } from "@azure/core-util";
import { SpanGraph, setTracer } from "@azure/test-utils";
-import { isPlaybackMode, record, Recorder } from "@azure-tools/test-recorder";
+import { isPlaybackMode, record, Recorder, delay } from "@azure-tools/test-recorder";
import { setSpan, context } from "@azure/core-tracing";
import { assert } from "chai";
diff --git a/sdk/storage/storage-file-datalake/test/retrypolicy.spec.ts b/sdk/storage/storage-file-datalake/test/retrypolicy.spec.ts
deleted file mode 100644
index b3ddf48d1235..000000000000
--- a/sdk/storage/storage-file-datalake/test/retrypolicy.spec.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { URLBuilder } from "@azure/core-http";
-import { assert } from "chai";
-
-import { AbortController } from "@azure/abort-controller";
-import { DataLakeFileSystemClient, RestError, DataLakeServiceClient } from "../src";
-import { newPipeline, Pipeline } from "../src/Pipeline";
-import { getDataLakeServiceClient, recorderEnvSetup } from "./utils";
-import { InjectorPolicyFactory } from "./utils/InjectorPolicyFactory";
-import { record, Recorder } from "@azure-tools/test-recorder";
-import { Context } from "mocha";
-
-describe("RetryPolicy", () => {
- let fileSystemName: string;
- let dataLakeFileSystemClient: DataLakeFileSystemClient;
-
- let recorder: Recorder;
- let serviceClient: DataLakeServiceClient;
- beforeEach(async function (this: Context) {
- recorder = record(this, recorderEnvSetup);
- serviceClient = getDataLakeServiceClient();
- fileSystemName = recorder.getUniqueName("container");
- dataLakeFileSystemClient = serviceClient.getFileSystemClient(fileSystemName);
- await dataLakeFileSystemClient.createIfNotExists();
- });
-
- afterEach(async function () {
- await dataLakeFileSystemClient.deleteIfExists();
- await recorder.stop();
- });
-
- it("Retry Policy should work when first request fails with 500", async () => {
- let injectCounter = 0;
- const injector = new InjectorPolicyFactory(() => {
- if (injectCounter === 0) {
- injectCounter++;
- return new RestError("Server Internal Error", "ServerInternalError", 500);
- }
- return;
- });
- const factories = (dataLakeFileSystemClient as any).pipeline.factories.slice(); // clone factories array
- factories.push(injector);
- const pipeline = new Pipeline(factories);
- const injectContainerClient = new DataLakeFileSystemClient(
- dataLakeFileSystemClient.url,
- pipeline
- );
-
- const metadata = {
- key0: "val0",
- keya: "vala",
- keyb: "valb",
- };
- await injectContainerClient.setMetadata(metadata);
-
- const result = await dataLakeFileSystemClient.getProperties();
- assert.deepEqual(result.metadata, metadata);
- });
-
- it("Retry Policy should abort when abort event trigger during retry interval", async () => {
- let injectCounter = 0;
- const injector = new InjectorPolicyFactory(() => {
- if (injectCounter < 2) {
- injectCounter++;
- return new RestError("Server Internal Error", "ServerInternalError", 500);
- }
- return;
- });
-
- const factories = (dataLakeFileSystemClient as any).pipeline.factories.slice(); // clone factories array
- factories.push(injector);
- const pipeline = new Pipeline(factories);
- const injectContainerClient = new DataLakeFileSystemClient(
- dataLakeFileSystemClient.url,
- pipeline
- );
-
- const metadata = {
- key0: "val0",
- keya: "vala",
- keyb: "valb",
- };
-
- let hasError = false;
- try {
- // Default exponential retry delay is 4000ms. Wait for 2000ms to abort which makes sure the aborter
- // happens between 2 requests
- await injectContainerClient.setMetadata(metadata, {
- abortSignal: AbortController.timeout(2 * 1000),
- });
- } catch (err: any) {
- hasError = true;
- }
- assert.ok(hasError);
- });
-
- it("Retry Policy should failed when requests always fail with 500", async () => {
- const injector = new InjectorPolicyFactory(() => {
- return new RestError("Server Internal Error", "ServerInternalError", 500);
- });
-
- const credential = (dataLakeFileSystemClient as any).pipeline.factories[
- (dataLakeFileSystemClient as any).pipeline.factories.length - 1
- ];
- const factories = newPipeline(credential, {
- retryOptions: { maxTries: 3 },
- }).factories;
- factories.push(injector);
- const pipeline = new Pipeline(factories);
- const injectContainerClient = new DataLakeFileSystemClient(
- dataLakeFileSystemClient.url,
- pipeline
- );
-
- let hasError = false;
- try {
- const metadata = {
- key0: "val0",
- keya: "vala",
- keyb: "valb",
- };
- await injectContainerClient.setMetadata(metadata);
- } catch (err: any) {
- hasError = true;
- }
- assert.ok(hasError);
- });
-
- it("Retry Policy should work for secondary endpoint", async () => {
- let injectCounter = 0;
- const injector = new InjectorPolicyFactory(() => {
- if (injectCounter++ < 1) {
- return new RestError("Server Internal Error", "ServerInternalError", 500);
- }
- return;
- });
-
- const url = serviceClient.url;
- const urlParsed = URLBuilder.parse(url);
- const host = urlParsed.getHost()!;
- const hostParts = host.split(".");
- const account = hostParts.shift();
- const secondaryAccount = `${account}-secondary`;
- hostParts.unshift(secondaryAccount);
- const secondaryHost = hostParts.join(".");
-
- const credential = (dataLakeFileSystemClient as any).pipeline.factories[
- (dataLakeFileSystemClient as any).pipeline.factories.length - 1
- ];
- const pipeline = newPipeline(credential, {
- retryOptions: { maxTries: 2, secondaryHost },
- });
- pipeline.factories.push(injector);
- const injectContainerClient = new DataLakeFileSystemClient(
- dataLakeFileSystemClient.url,
- pipeline
- );
-
- let finalRequestURL = "";
- try {
- const response = await injectContainerClient.getProperties();
- finalRequestURL = response._response.request.url;
- } catch (err: any) {
- finalRequestURL = err.request ? err.request.url : "";
- }
-
- assert.deepStrictEqual(URLBuilder.parse(finalRequestURL).getHost(), secondaryHost);
- });
-});
diff --git a/sdk/storage/storage-file-datalake/test/utils.spec.ts b/sdk/storage/storage-file-datalake/test/utils.spec.ts
index 069b09d18f29..badad426a8d8 100644
--- a/sdk/storage/storage-file-datalake/test/utils.spec.ts
+++ b/sdk/storage/storage-file-datalake/test/utils.spec.ts
@@ -2,7 +2,7 @@
// Licensed under the MIT license.
import { assert } from "chai";
-import { HttpHeaders } from "../src";
+import { createHttpHeaders } from "@azure/core-rest-pipeline";
import {
sanitizeHeaders,
sanitizeURL,
@@ -56,7 +56,7 @@ describe("Utility Helpers", () => {
it("sanitizeHeaders redacts SAS token", () => {
const url = "https://some.url.com/container/blob?sig=sasstring";
- const headers = new HttpHeaders();
+ const headers = createHttpHeaders();
headers.set("authorization", "Bearer abcdefg");
headers.set("x-ms-copy-source", url);
headers.set("otherheader", url);
diff --git a/sdk/storage/storage-file-datalake/test/utils/InjectorPolicy.ts b/sdk/storage/storage-file-datalake/test/utils/InjectorPolicy.ts
deleted file mode 100644
index 0651f16623f2..000000000000
--- a/sdk/storage/storage-file-datalake/test/utils/InjectorPolicy.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import {
- BaseRequestPolicy,
- HttpOperationResponse,
- RequestPolicy,
- RequestPolicyOptions,
- WebResource,
- RestError,
-} from "../../src";
-
-export interface NextInjectErrorHolder {
- nextInjectError?: RestError;
-}
-
-export type Injector = () => RestError | undefined;
-
-/**
- * InjectorPolicy will inject a customized error before next HTTP request.
- */
-export class InjectorPolicy extends BaseRequestPolicy {
- /**
- * Creates an instance of InjectorPolicy.
- *
- * @param nextPolicy -
- * @param options -
- */
- public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, injector: Injector) {
- super(nextPolicy, options);
- this.injector = injector;
- }
-
- /**
- * Sends request.
- *
- * @param request -
- */
- public async sendRequest(request: WebResource): Promise {
- const error = this.injector();
- if (error) {
- throw error;
- }
- return this._nextPolicy.sendRequest(request);
- }
-
- private injector: Injector;
-}
diff --git a/sdk/storage/storage-file-datalake/test/utils/InjectorPolicyFactory.ts b/sdk/storage/storage-file-datalake/test/utils/InjectorPolicyFactory.ts
deleted file mode 100644
index b0c36c384558..000000000000
--- a/sdk/storage/storage-file-datalake/test/utils/InjectorPolicyFactory.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-
-import { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "../../src";
-import { InjectorPolicy, Injector } from "./InjectorPolicy";
-
-/**
- * InjectorPolicyFactory is a factory class which injects customized errors for retry policy testing.
- */
-export class InjectorPolicyFactory implements RequestPolicyFactory {
- public readonly injector: Injector;
-
- public constructor(injector: Injector) {
- this.injector = injector;
- }
-
- public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): InjectorPolicy {
- return new InjectorPolicy(nextPolicy, options, this.injector);
- }
-}
diff --git a/sdk/storage/storage-file-datalake/test/utils/assert.ts b/sdk/storage/storage-file-datalake/test/utils/assert.ts
index 666009af6a59..887295b20c7b 100644
--- a/sdk/storage/storage-file-datalake/test/utils/assert.ts
+++ b/sdk/storage/storage-file-datalake/test/utils/assert.ts
@@ -1,11 +1,10 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
+import { isTokenCredential } from "@azure/core-auth";
import { assert } from "chai";
import { StorageClient } from "../../src/StorageClient";
export function assertClientUsesTokenCredential(client: StorageClient): void {
- const factories = (client as any).pipeline.factories;
- const authPolicy = factories[factories.length - 1].create();
- assert.strictEqual(authPolicy.constructor.name, "BearerTokenAuthenticationPolicy");
+ assert.isTrue(isTokenCredential(client.credential));
}
diff --git a/sdk/storage/storage-file-datalake/test/utils/index.browser.ts b/sdk/storage/storage-file-datalake/test/utils/index.browser.ts
index 35b276558742..4ed889145149 100644
--- a/sdk/storage/storage-file-datalake/test/utils/index.browser.ts
+++ b/sdk/storage/storage-file-datalake/test/utils/index.browser.ts
@@ -1,18 +1,18 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { TokenCredential } from "@azure/core-http";
+import { TokenCredential } from "@azure/core-auth";
import { DataLakeServiceClient } from "../../src";
-import { AnonymousCredential } from "../../src/credentials/AnonymousCredential";
-import { newPipeline } from "../../src/Pipeline";
+import { setTestOnlySetHttpClient } from "../../src/StorageClient";
+import { newPipeline, AnonymousCredential } from "@azure/storage-blob";
import { SimpleTokenCredential } from "./testutils.common";
+import { createXhrHttpClient } from "@azure/test-utils";
+import { isLiveMode } from "@azure-tools/test-recorder";
export * from "./testutils.common";
-export function getGenericCredential(accountType: string): AnonymousCredential {
- const _accountType = accountType; // bypass compiling error
- accountType = _accountType;
+export function getGenericCredential(): AnonymousCredential {
return new AnonymousCredential();
}
@@ -31,6 +31,9 @@ export function getGenericDataLakeServiceClient(
accountType: string,
accountNameSuffix: string = ""
): DataLakeServiceClient {
+ if (!isLiveMode()) {
+ setTestOnlySetHttpClient(createXhrHttpClient());
+ }
const accountNameEnvVar = `${accountType}ACCOUNT_NAME`;
const accountSASEnvVar = `${accountType}ACCOUNT_SAS`;
@@ -45,20 +48,23 @@ export function getGenericDataLakeServiceClient(
);
}
- if (accountSAS) {
- accountSAS = accountSAS.startsWith("?") ? accountSAS : `?${accountSAS}`;
+ accountSAS = accountSAS.startsWith("?") ? accountSAS : `?${accountSAS}`;
+
+ // don't add the test account SAS value.
+ if (accountSAS === "?fakeSasToken") {
+ accountSAS = "";
}
- const credentials = getGenericCredential(accountType);
- const pipeline = newPipeline(credentials, {
- // Enable logger when debugging
- // logger: new ConsoleHttpPipelineLogger(HttpPipelineLogLevel.INFO)
- });
+ const credentials = getGenericCredential();
+ const pipeline = newPipeline(credentials);
const dfsPrimaryURL = `https://${accountName}${accountNameSuffix}.dfs.core.windows.net${accountSAS}`;
return new DataLakeServiceClient(dfsPrimaryURL, pipeline);
}
export function getTokenDataLakeServiceClient(): DataLakeServiceClient {
+ if (!isLiveMode()) {
+ setTestOnlySetHttpClient(createXhrHttpClient());
+ }
const accountNameEnvVar = `DFS_ACCOUNT_NAME`;
const accountName = (self as any).__env__[accountNameEnvVar];
@@ -167,6 +173,14 @@ export function getBrowserFile(name: string, size: number): File {
}
export function getSASConnectionStringFromEnvironment(): string {
+ if (!isLiveMode()) {
+ setTestOnlySetHttpClient(createXhrHttpClient());
+ }
const env = (self as any).__env__;
- return `BlobEndpoint=https://${env.DFS_ACCOUNT_NAME}.blob.core.windows.net/;QueueEndpoint=https://${env.DFS_ACCOUNT_NAME}.queue.core.windows.net/;FileEndpoint=https://${env.DFS_ACCOUNT_NAME}.file.core.windows.net/;TableEndpoint=https://${env.DFS_ACCOUNT_NAME}.table.core.windows.net/;SharedAccessSignature=${env.DFS_ACCOUNT_SAS}`;
+ let sasToken: string = env.DFS_ACCOUNT_SAS;
+ // connection string SAS doesn't have the prefix
+ if (sasToken && sasToken.startsWith("?")) {
+ sasToken = sasToken.slice(1);
+ }
+ return `BlobEndpoint=https://${env.DFS_ACCOUNT_NAME}.blob.core.windows.net/;QueueEndpoint=https://${env.DFS_ACCOUNT_NAME}.queue.core.windows.net/;FileEndpoint=https://${env.DFS_ACCOUNT_NAME}.file.core.windows.net/;TableEndpoint=https://${env.DFS_ACCOUNT_NAME}.table.core.windows.net/;SharedAccessSignature=${sasToken}`;
}
diff --git a/sdk/storage/storage-file-datalake/test/utils/index.ts b/sdk/storage/storage-file-datalake/test/utils/index.ts
index 69930beebdae..d739a7e6637f 100644
--- a/sdk/storage/storage-file-datalake/test/utils/index.ts
+++ b/sdk/storage/storage-file-datalake/test/utils/index.ts
@@ -1,16 +1,19 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { TokenCredential } from "@azure/core-http";
+import { TokenCredential } from "@azure/core-auth";
import { env } from "@azure-tools/test-recorder";
import { randomBytes } from "crypto";
import * as fs from "fs";
import * as path from "path";
import { DefaultAzureCredential } from "@azure/identity";
-import { StorageSharedKeyCredential } from "../../src/credentials/StorageSharedKeyCredential";
import { DataLakeServiceClient } from "../../src/DataLakeServiceClient";
-import { newPipeline, StoragePipelineOptions } from "../../src/Pipeline";
+import {
+ newPipeline,
+ StoragePipelineOptions,
+ StorageSharedKeyCredential,
+} from "@azure/storage-blob";
import { getUniqueName, SimpleTokenCredential } from "./testutils.common";
import {
AccountSASPermissions,
@@ -100,7 +103,7 @@ export function getGenericDataLakeServiceClient(
`getGenericDataLakeServiceClient() doesn't support creating DataLakeServiceClient from connection string.`
);
} else {
- const credential = getGenericCredential(accountType) as StorageSharedKeyCredential;
+ const credential = getGenericCredential(accountType);
const pipeline = newPipeline(credential, {
...pipelineOptions,
// Enable logger when debugging
diff --git a/sdk/storage/storage-file-datalake/test/utils/testutils.common.ts b/sdk/storage/storage-file-datalake/test/utils/testutils.common.ts
index 3cc3c0871c0a..efacce2dd153 100644
--- a/sdk/storage/storage-file-datalake/test/utils/testutils.common.ts
+++ b/sdk/storage/storage-file-datalake/test/utils/testutils.common.ts
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
-import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-http";
+import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth";
import { env, isPlaybackMode, RecorderEnvironmentSetup } from "@azure-tools/test-recorder";
export const testPollerProperties = {
@@ -10,6 +10,7 @@ export const testPollerProperties = {
const mockAccountName = "fakestorageaccount";
const mockAccountKey = "aaaaa";
+const mockSas = "fakeSasToken";
export const recorderEnvSetup: RecorderEnvironmentSetup = {
replaceableVariables: {
// Used in record and playback modes
@@ -17,7 +18,7 @@ export const recorderEnvSetup: RecorderEnvironmentSetup = {
// 2. If the env variables are present in the recordings as plain strings, they will be replaced with the provided values in record mode
DFS_ACCOUNT_NAME: `${mockAccountName}`,
DFS_ACCOUNT_KEY: `${mockAccountKey}`,
- DFS_ACCOUNT_SAS: `${mockAccountKey}`,
+ DFS_ACCOUNT_SAS: `${mockSas}`,
DFS_STORAGE_CONNECTION_STRING: `DefaultEndpointsProtocol=https;AccountName=${mockAccountName};AccountKey=${mockAccountKey};EndpointSuffix=core.windows.net`,
ENCRYPTION_SCOPE_1: "antjoscope1",
ENCRYPTION_SCOPE_2: "antjoscope2",
@@ -26,7 +27,7 @@ export const recorderEnvSetup: RecorderEnvironmentSetup = {
DFS_ACCOUNT_TOKEN: `${mockAccountKey}`,
DFS_SOFT_DELETE_ACCOUNT_NAME: `${mockAccountName}`,
DFS_SOFT_DELETE_ACCOUNT_KEY: `${mockAccountKey}`,
- DFS_SOFT_DELETE_ACCOUNT_SAS: `${mockAccountKey}`,
+ DFS_SOFT_DELETE_ACCOUNT_SAS: `${mockSas}`,
AZURE_CLIENT_ID: `${mockAccountKey}`,
AZURE_TENANT_ID: `${mockAccountKey}`,
AZURE_CLIENT_SECRET: `${mockAccountKey}`,