diff --git a/.github/workflows/zowe-cli.yml b/.github/workflows/zowe-cli.yml index 2a48760763..6483dc7837 100644 --- a/.github/workflows/zowe-cli.yml +++ b/.github/workflows/zowe-cli.yml @@ -31,7 +31,7 @@ jobs: strategy: fail-fast: false matrix: - node-version: [12.x, 14.x, 16.x] + node-version: [14.x, 16.x, 18.x] os: [windows-latest, ubuntu-latest, macos-latest] env: @@ -77,7 +77,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: 1.61.0 # See https://github.com/zowe/zowe-cli/issues/1466 override: true - name: Checkout Imperative diff --git a/__tests__/__packages__/cli-test-utils/package.json b/__tests__/__packages__/cli-test-utils/package.json index b7acb1cca4..8a58f37742 100644 --- a/__tests__/__packages__/cli-test-utils/package.json +++ b/__tests__/__packages__/cli-test-utils/package.json @@ -44,7 +44,7 @@ "@types/js-yaml": "^4.0.0", "@types/node": "^14.14.37", "@types/uuid": "^8.3.0", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "typescript": "^4.2.3" }, diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index 7542982028..3024eba4cf 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -11,7 +11,7 @@ "__tests__/__packages__/*" ], "dependencies": { - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/perf-timing": "1.0.7" }, "devDependencies": { @@ -71,7 +71,7 @@ "@types/js-yaml": "^4.0.0", "@types/node": "^14.14.37", "@types/uuid": "^8.3.0", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "typescript": "^4.2.3" }, @@ -5311,10 +5311,9 @@ "link": true }, "node_modules/@zowe/imperative": { - "version": "5.3.3", - "resolved": "https://zowe.jfrog.io/zowe/api/npm/npm-local-release/@zowe/imperative/-/@zowe/imperative-5.3.3.tgz", - "integrity": "sha512-hj/avbLn5aQaFTDAZC6boX5mAbVicWZ1wnFOhEFIR9xcNJw7x74I6a5t6OAaYyc6/Vl1c1zmuOAOyFSsfWmoUg==", - "license": "EPL-2.0", + "version": "5.3.4", + "resolved": "https://zowe.jfrog.io/zowe/api/npm/npm-local-release/@zowe/imperative/-/@zowe/imperative-5.3.4.tgz", + "integrity": "sha512-jD81KyUp6b0sLkHkQC2hirEaQjxDdLFN7qdRpfV1/HX2pZe6a1xThxsKjIEBzcpOEeAkZe+uQ/zQORmv/IC6Eg==", "dependencies": { "@types/lodash-deep": "2.0.0", "@types/yargs": "13.0.4", @@ -24968,7 +24967,7 @@ "license": "EPL-2.0", "dependencies": { "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/perf-timing": "1.0.7", "@zowe/provisioning-for-zowe-sdk": "7.2.5", "@zowe/zos-console-for-zowe-sdk": "7.2.5", @@ -25049,7 +25048,7 @@ "devDependencies": { "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "chalk": "^4.1.0", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -25073,7 +25072,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25102,7 +25101,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25122,7 +25121,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25145,7 +25144,7 @@ "@types/node": "^12.12.24", 
"@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zos-uss-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -25169,7 +25168,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25189,7 +25188,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25209,7 +25208,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25232,7 +25231,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -25255,7 +25254,7 @@ "@types/node": "^12.12.24", "@types/ssh2": "^0.5.44", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -29542,7 +29541,7 @@ "@types/which": "2.0.1", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/perf-timing": "1.0.7", "@zowe/provisioning-for-zowe-sdk": "7.2.5", "@zowe/zos-console-for-zowe-sdk": "7.2.5", @@ -29592,7 +29591,7 @@ "@types/js-yaml": "^4.0.0", "@types/node": "^14.14.37", "@types/uuid": "^8.3.0", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "find-up": "^5.0.0", "js-yaml": "^4.0.0", @@ -29641,7 +29640,7 @@ "requires": { "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "chalk": "^4.1.0", "comment-json": "4.1.0", "eslint": "^7.32.0", @@ -29653,9 +29652,9 @@ } }, "@zowe/imperative": { - "version": "5.3.3", - "resolved": "https://zowe.jfrog.io/zowe/api/npm/npm-local-release/@zowe/imperative/-/@zowe/imperative-5.3.3.tgz", - "integrity": "sha512-hj/avbLn5aQaFTDAZC6boX5mAbVicWZ1wnFOhEFIR9xcNJw7x74I6a5t6OAaYyc6/Vl1c1zmuOAOyFSsfWmoUg==", + "version": "5.3.4", + "resolved": "https://zowe.jfrog.io/zowe/api/npm/npm-local-release/@zowe/imperative/-/@zowe/imperative-5.3.4.tgz", + "integrity": "sha512-jD81KyUp6b0sLkHkQC2hirEaQjxDdLFN7qdRpfV1/HX2pZe6a1xThxsKjIEBzcpOEeAkZe+uQ/zQORmv/IC6Eg==", "requires": { "@types/lodash-deep": "2.0.0", "@types/yargs": "13.0.4", @@ -29826,7 +29825,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "js-yaml": "3.14.1", "madge": "^4.0.1", @@ -29849,7 +29848,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -29863,7 +29862,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zos-uss-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -29879,7 
+29878,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zos-files-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -29894,7 +29893,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -29908,7 +29907,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zosmf-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -29923,7 +29922,7 @@ "@types/node": "^12.12.24", "@types/ssh2": "^0.5.44", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", @@ -29938,7 +29937,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zos-files-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", @@ -29953,7 +29952,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/package.json b/package.json index ebe37ba982..8a87e3a17d 100644 --- a/package.json +++ b/package.json @@ -40,7 +40,7 @@ "prepack": "node scripts/prepareLicenses.js" }, "dependencies": { - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/perf-timing": "1.0.7" }, "devDependencies": { @@ -141,4 +141,4 @@ "jest-junit": { "output": "__tests__/__results__/junit.xml" } -} \ No newline at end of file +} diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index ca6bca935c..c964781dca 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to the Zowe CLI package will be documented in this file. +## Recent Changes + +- Enhancement: Added the `zowe files download data-sets-matching` command to download multiple data sets at once. [#1287](https://github.com/zowe/zowe-cli/issues/1287) + - Note: If you used this command previously in the extended files plug-in for Zowe v1, the `--fail-fast` option now defaults to true which is different from the original behavior. + ## `7.2.4` - BugFix: Fixed the Zowe Daemon binary exiting with an error if the daemon server does not start within 3 seconds. diff --git a/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command/command_download_dsm.sh b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command/command_download_dsm.sh new file mode 100644 index 0000000000..781e1150d8 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command/command_download_dsm.sh @@ -0,0 +1,11 @@ +#!/bin/bash +dsn=$1 +rfj=$2 +set -e + +echo "================Z/OS FILES DOWNLOAD DATASET MATCHING===============" +zowe zos-files download dsm "$1" $2 $3 +if [ $? -gt 0 ] +then + exit $? 
+fi diff --git a/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command_download_dsm_help.sh b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command_download_dsm_help.sh new file mode 100644 index 0000000000..9983d8d6b7 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__scripts__/command_download_dsm_help.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +echo "================Z/OS FILES DOWNLOAD DATASET MATCHING HELP===============" +zowe zos-files download dsm --help +if [ $? -gt 0 ] +then + exit $? +fi + +echo "================Z/OS FILES DOWNLOAD DATASET MATCHING HELP WITH RFJ===========" +zowe zos-files download dsm --help --rfj +exit $? \ No newline at end of file diff --git a/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__snapshots__/cli.files.download.dsm.integration.test.ts.snap b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__snapshots__/cli.files.download.dsm.integration.test.ts.snap new file mode 100644 index 0000000000..69680acae7 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/__snapshots__/cli.files.download.dsm.integration.test.ts.snap @@ -0,0 +1,245 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Download Dataset Matching should display the help 1`] = ` +"================Z/OS FILES DOWNLOAD DATASET MATCHING HELP=============== + + COMMAND NAME + ------------ + + data-set-matching | dsm + + DESCRIPTION + ----------- + + Download all data sets that match a DSLEVEL pattern (see help below). You can + use several options to qualify which data sets will be skipped and how the + downloaded files will be structured. Data sets that are neither physical + sequential nor partitioned data sets (with members) will be excluded. + + USAGE + ----- + + zowe zos-files download data-set-matching [options] + + POSITIONAL ARGUMENTS + -------------------- + + pattern (string) + + The pattern or patterns to match data sets against. Also known as 'DSLEVEL'. The + following special sequences can be used in the pattern: + %: matches any single character + *: matches any number of characters within a data set name qualifier (e.g. + \\"ibmuser.j*.old\\" matches \\"ibmuser.jcl.old\\" but not \\"ibmuser.jcl.very.old\\") + **: matches any number of characters within any number of data set name + qualifiers (e.g. \\"ibmuser.**.old\\" matches both \\"ibmuser.jcl.old\\" and + \\"ibmuser.jcl.very.old\\") + However, the pattern cannot begin with any of these sequences.You can specify + multiple patterns separated by commas, for example + \\"ibmuser.**.cntl,ibmuser.**.jcl\\" + + OPTIONS + ------- + + --binary | -b (boolean) + + Download the file content in binary mode, which means that no data conversion is + performed. The data transfer process returns each line as-is, without + translation. No delimiters are added between records. + + --directory | -d (string) + + The directory to where you want to save the members. The command creates the + directory for you when it does not already exist. By default, the command + creates a folder structure based on the data set qualifiers. For example, the + data set ibmuser.new.cntl's members are downloaded to ibmuser/new/cntl). + + --encoding | --ec (string) + + Download the file content with encoding mode, which means that data conversion + is performed using the file encoding specified. + + --exclude-patterns | --ep (string) + + Exclude data sets that match these DSLEVEL patterns. 
Any data sets that match + this pattern will not be downloaded. + + --extension | -e (string) + + Save the local files with a specified file extension. For example, .txt. Or \\"\\" + for no extension. When no extension is specified, .txt is used as the default + file extension. + + --extension-map | --em (string) + + Use this option to map data set names that match your pattern to the desired + extension. A comma delimited key value pairing (e.g. \\"cntl=.jcl,cpgm=.c\\" to map + the last segment of each data set (also known as the \\"low level qualifier\\" to + the desired local file extension). + + --fail-fast | --ff (boolean) + + Set this option to false to continue downloading dataset members if one or more + fail. + + Default value: true + + --max-concurrent-requests | --mcr (number) + + Specifies the maximum number of concurrent z/OSMF REST API requests to download + members. Increasing the value results in faster downloads. However, increasing + the value increases resource consumption on z/OS and can be prone to errors + caused by making too many concurrent requests. If the download process + encounters an error, the following message displays: + The maximum number of TSO address spaces have been created. When you specify 0, + Zowe CLI attempts to download all members at once without a maximum number of + concurrent requests. + + Default value: 1 + + --preserve-original-letter-case | --po (boolean) + + Specifies if the automatically generated directories and files use the original + letter case + + Default value: false + + --record | -r (boolean) + + Download the file content in record mode, which means that no data conversion is + performed and the record length is prepended to the data. The data transfer + process returns each line as-is, without translation. No delimiters are added + between records. Conflicts with binary. + + --volume-serial | --vs (string) + + The volume serial (VOLSER) where the data set resides. You can use this option + at any time. However, the VOLSER is required only when the data set is not + cataloged on the system. A VOLSER is analogous to a drive name on a PC. + + --response-timeout | --rto (number) + + The maximum amount of time in seconds the z/OSMF Files TSO servlet should run + before returning a response. Any request exceeding this amount of time will be + terminated and return an error. Allowed values: 5 - 600 + + ZOSMF CONNECTION OPTIONS + ------------------------ + + --host | -H (string) + + The z/OSMF server host name. + + --port | -P (number) + + The z/OSMF server port. + + Default value: 443 + + --user | -u (string) + + Mainframe (z/OSMF) user name, which can be the same as your TSO login. + + --password | --pass | --pw (string) + + Mainframe (z/OSMF) password, which can be the same as your TSO password. + + --reject-unauthorized | --ru (boolean) + + Reject self-signed certificates. + + Default value: true + + --base-path | --bp (string) + + The base path for your API mediation layer instance. Specify this option to + prepend the base path to all z/OSMF resources when making REST requests. Do not + specify this option if you are not using an API mediation layer. 
+ + --protocol (string) + + The protocol used (HTTP or HTTPS) + + Default value: https + Allowed values: http, https + + --cert-file (local file path) + + The file path to a certificate file to use for authentication + + --cert-key-file (local file path) + + The file path to a certificate key file to use for authentication + + PROFILE OPTIONS + --------------- + + --zosmf-profile | --zosmf-p (string) + + The name of a (zosmf) profile to load for this command execution. + + --base-profile | --base-p (string) + + The name of a (base) profile to load for this command execution. + + BASE CONNECTION OPTIONS + ----------------------- + + --token-type | --tt (string) + + The type of token to get and use for the API. Omit this option to use the + default token type, which is provided by 'zowe auth login'. + + --token-value | --tv (string) + + The value of the token to pass to the API. + + GLOBAL OPTIONS + -------------- + + --show-inputs-only (boolean) + + Show command inputs and do not run the command + + --response-format-json | --rfj (boolean) + + Produce JSON formatted data from a command + + --help | -h (boolean) + + Display help text + + --help-examples (boolean) + + Display examples for all the commands in a group + + --help-web | --hw (boolean) + + Display HTML help in browser + + EXAMPLES + -------- + + - Download all data sets beginning with \\"ibmuser\\" and ending + with \\".cntl\\" or \\".jcl\\" to the local directory \\"jcl\\" to files with the extension + \\".jcl\\": + + $ zowe zos-files download data-set-matching \\"ibmuser.**.cntl, ibmuser.**.jcl\\" --directory jcl --extension .jcl + + - Download all data sets that begin with + \\"ibmuser.public.project\\" or \\"ibmuser.project.private\\", excluding those that end + in \\"lib\\" to the local directory \\"project\\", providing a custom mapping of data + set low level qualifier to local file extension: + + $ zowe zos-files download data-set-matching \\"ibmuser.public.project.*, ibmuser.project.private.*\\" --exclude-patterns \\"ibmuser.public.**.*lib\\" --directory project --extension-map cpgm=c,asmpgm=asm,java=java,chdr=c,jcl=jcl,cntl=jcl + +================Z/OS FILES DOWNLOAD DATASET MATCHING HELP WITH RFJ=========== +{ + \\"success\\": true, + \\"exitCode\\": 0, + \\"message\\": \\"The help was constructed for command: data-set-matching.\\", + \\"stdout\\": \\"\\\\n COMMAND NAME\\\\n ------------\\\\n\\\\n data-set-matching | dsm\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Download all data sets that match a DSLEVEL pattern (see help below). You can\\\\n use several options to qualify which data sets will be skipped and how the\\\\n downloaded files will be structured. Data sets that are neither physical\\\\n sequential nor partitioned data sets (with members) will be excluded.\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-files download data-set-matching [options]\\\\n\\\\n POSITIONAL ARGUMENTS\\\\n --------------------\\\\n\\\\n pattern\\\\t\\\\t (string)\\\\n\\\\n The pattern or patterns to match data sets against. Also known as 'DSLEVEL'. The\\\\n following special sequences can be used in the pattern:\\\\n %: matches any single character\\\\n *: matches any number of characters within a data set name qualifier (e.g.\\\\n \\\\\\"ibmuser.j*.old\\\\\\" matches \\\\\\"ibmuser.jcl.old\\\\\\" but not \\\\\\"ibmuser.jcl.very.old\\\\\\")\\\\n **: matches any number of characters within any number of data set name\\\\n qualifiers (e.g. 
\\\\\\"ibmuser.**.old\\\\\\" matches both \\\\\\"ibmuser.jcl.old\\\\\\" and\\\\n \\\\\\"ibmuser.jcl.very.old\\\\\\")\\\\n However, the pattern cannot begin with any of these sequences.You can specify\\\\n multiple patterns separated by commas, for example\\\\n \\\\\\"ibmuser.**.cntl,ibmuser.**.jcl\\\\\\"\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --binary | -b (boolean)\\\\n\\\\n Download the file content in binary mode, which means that no data conversion is\\\\n performed. The data transfer process returns each line as-is, without\\\\n translation. No delimiters are added between records.\\\\n\\\\n --directory | -d (string)\\\\n\\\\n The directory to where you want to save the members. The command creates the\\\\n directory for you when it does not already exist. By default, the command\\\\n creates a folder structure based on the data set qualifiers. For example, the\\\\n data set ibmuser.new.cntl's members are downloaded to ibmuser/new/cntl).\\\\n\\\\n --encoding | --ec (string)\\\\n\\\\n Download the file content with encoding mode, which means that data conversion\\\\n is performed using the file encoding specified.\\\\n\\\\n --exclude-patterns | --ep (string)\\\\n\\\\n Exclude data sets that match these DSLEVEL patterns. Any data sets that match\\\\n this pattern will not be downloaded.\\\\n\\\\n --extension | -e (string)\\\\n\\\\n Save the local files with a specified file extension. For example, .txt. Or \\\\\\"\\\\\\"\\\\n for no extension. When no extension is specified, .txt is used as the default\\\\n file extension.\\\\n\\\\n --extension-map | --em (string)\\\\n\\\\n Use this option to map data set names that match your pattern to the desired\\\\n extension. A comma delimited key value pairing (e.g. \\\\\\"cntl=.jcl,cpgm=.c\\\\\\" to map\\\\n the last segment of each data set (also known as the \\\\\\"low level qualifier\\\\\\" to\\\\n the desired local file extension).\\\\n\\\\n --fail-fast | --ff (boolean)\\\\n\\\\n Set this option to false to continue downloading dataset members if one or more\\\\n fail.\\\\n\\\\n Default value: true\\\\n\\\\n --max-concurrent-requests | --mcr (number)\\\\n\\\\n Specifies the maximum number of concurrent z/OSMF REST API requests to download\\\\n members. Increasing the value results in faster downloads. However, increasing\\\\n the value increases resource consumption on z/OS and can be prone to errors\\\\n caused by making too many concurrent requests. If the download process\\\\n encounters an error, the following message displays:\\\\n The maximum number of TSO address spaces have been created. When you specify 0,\\\\n Zowe CLI attempts to download all members at once without a maximum number of\\\\n concurrent requests.\\\\n\\\\n Default value: 1\\\\n\\\\n --preserve-original-letter-case | --po (boolean)\\\\n\\\\n Specifies if the automatically generated directories and files use the original\\\\n letter case\\\\n\\\\n Default value: false\\\\n\\\\n --record | -r (boolean)\\\\n\\\\n Download the file content in record mode, which means that no data conversion is\\\\n performed and the record length is prepended to the data. The data transfer\\\\n process returns each line as-is, without translation. No delimiters are added\\\\n between records. Conflicts with binary.\\\\n\\\\n --volume-serial | --vs (string)\\\\n\\\\n The volume serial (VOLSER) where the data set resides. You can use this option\\\\n at any time. However, the VOLSER is required only when the data set is not\\\\n cataloged on the system. 
A VOLSER is analogous to a drive name on a PC.\\\\n\\\\n --response-timeout | --rto (number)\\\\n\\\\n The maximum amount of time in seconds the z/OSMF Files TSO servlet should run\\\\n before returning a response. Any request exceeding this amount of time will be\\\\n terminated and return an error. Allowed values: 5 - 600\\\\n\\\\n ZOSMF CONNECTION OPTIONS\\\\n ------------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The z/OSMF server host name.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The z/OSMF server port.\\\\n\\\\n Default value: 443\\\\n\\\\n --user | -u (string)\\\\n\\\\n Mainframe (z/OSMF) user name, which can be the same as your TSO login.\\\\n\\\\n --password | --pass | --pw (string)\\\\n\\\\n Mainframe (z/OSMF) password, which can be the same as your TSO password.\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates.\\\\n\\\\n Default value: true\\\\n\\\\n --base-path | --bp (string)\\\\n\\\\n The base path for your API mediation layer instance. Specify this option to\\\\n prepend the base path to all z/OSMF resources when making REST requests. Do not\\\\n specify this option if you are not using an API mediation layer.\\\\n\\\\n --protocol (string)\\\\n\\\\n The protocol used (HTTP or HTTPS)\\\\n\\\\n Default value: https\\\\n Allowed values: http, https\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zosmf-profile | --zosmf-p (string)\\\\n\\\\n The name of a (zosmf) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. 
Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-examples (boolean)\\\\n\\\\n Display examples for all the commands in a group\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Download all data sets beginning with \\\\\\"ibmuser\\\\\\" and ending\\\\n with \\\\\\".cntl\\\\\\" or \\\\\\".jcl\\\\\\" to the local directory \\\\\\"jcl\\\\\\" to files with the extension\\\\n \\\\\\".jcl\\\\\\":\\\\n\\\\n $ zowe zos-files download data-set-matching \\\\\\"ibmuser.**.cntl, ibmuser.**.jcl\\\\\\" --directory jcl --extension .jcl\\\\n\\\\n - Download all data sets that begin with\\\\n \\\\\\"ibmuser.public.project\\\\\\" or \\\\\\"ibmuser.project.private\\\\\\", excluding those that end\\\\n in \\\\\\"lib\\\\\\" to the local directory \\\\\\"project\\\\\\", providing a custom mapping of data\\\\n set low level qualifier to local file extension:\\\\n\\\\n $ zowe zos-files download data-set-matching \\\\\\"ibmuser.public.project.*, ibmuser.project.private.*\\\\\\" --exclude-patterns \\\\\\"ibmuser.public.**.*lib\\\\\\" --directory project --extension-map cpgm=c,asmpgm=asm,java=java,chdr=c,jcl=jcl,cntl=jcl\\\\n\\\\n\\", + \\"stderr\\": \\"\\", + \\"data\\": \\"\\\\n COMMAND NAME\\\\n ------------\\\\n\\\\n data-set-matching | dsm\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Download all data sets that match a DSLEVEL pattern (see help below). You can\\\\n use several options to qualify which data sets will be skipped and how the\\\\n downloaded files will be structured. Data sets that are neither physical\\\\n sequential nor partitioned data sets (with members) will be excluded.\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-files download data-set-matching [options]\\\\n\\\\n POSITIONAL ARGUMENTS\\\\n --------------------\\\\n\\\\n pattern\\\\t\\\\t (string)\\\\n\\\\n The pattern or patterns to match data sets against. Also known as 'DSLEVEL'. The\\\\n following special sequences can be used in the pattern:\\\\n %: matches any single character\\\\n *: matches any number of characters within a data set name qualifier (e.g.\\\\n \\\\\\"ibmuser.j*.old\\\\\\" matches \\\\\\"ibmuser.jcl.old\\\\\\" but not \\\\\\"ibmuser.jcl.very.old\\\\\\")\\\\n **: matches any number of characters within any number of data set name\\\\n qualifiers (e.g. \\\\\\"ibmuser.**.old\\\\\\" matches both \\\\\\"ibmuser.jcl.old\\\\\\" and\\\\n \\\\\\"ibmuser.jcl.very.old\\\\\\")\\\\n However, the pattern cannot begin with any of these sequences.You can specify\\\\n multiple patterns separated by commas, for example\\\\n \\\\\\"ibmuser.**.cntl,ibmuser.**.jcl\\\\\\"\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --binary | -b (boolean)\\\\n\\\\n Download the file content in binary mode, which means that no data conversion is\\\\n performed. The data transfer process returns each line as-is, without\\\\n translation. No delimiters are added between records.\\\\n\\\\n --directory | -d (string)\\\\n\\\\n The directory to where you want to save the members. 
The command creates the\\\\n directory for you when it does not already exist. By default, the command\\\\n creates a folder structure based on the data set qualifiers. For example, the\\\\n data set ibmuser.new.cntl's members are downloaded to ibmuser/new/cntl).\\\\n\\\\n --encoding | --ec (string)\\\\n\\\\n Download the file content with encoding mode, which means that data conversion\\\\n is performed using the file encoding specified.\\\\n\\\\n --exclude-patterns | --ep (string)\\\\n\\\\n Exclude data sets that match these DSLEVEL patterns. Any data sets that match\\\\n this pattern will not be downloaded.\\\\n\\\\n --extension | -e (string)\\\\n\\\\n Save the local files with a specified file extension. For example, .txt. Or \\\\\\"\\\\\\"\\\\n for no extension. When no extension is specified, .txt is used as the default\\\\n file extension.\\\\n\\\\n --extension-map | --em (string)\\\\n\\\\n Use this option to map data set names that match your pattern to the desired\\\\n extension. A comma delimited key value pairing (e.g. \\\\\\"cntl=.jcl,cpgm=.c\\\\\\" to map\\\\n the last segment of each data set (also known as the \\\\\\"low level qualifier\\\\\\" to\\\\n the desired local file extension).\\\\n\\\\n --fail-fast | --ff (boolean)\\\\n\\\\n Set this option to false to continue downloading dataset members if one or more\\\\n fail.\\\\n\\\\n Default value: true\\\\n\\\\n --max-concurrent-requests | --mcr (number)\\\\n\\\\n Specifies the maximum number of concurrent z/OSMF REST API requests to download\\\\n members. Increasing the value results in faster downloads. However, increasing\\\\n the value increases resource consumption on z/OS and can be prone to errors\\\\n caused by making too many concurrent requests. If the download process\\\\n encounters an error, the following message displays:\\\\n The maximum number of TSO address spaces have been created. When you specify 0,\\\\n Zowe CLI attempts to download all members at once without a maximum number of\\\\n concurrent requests.\\\\n\\\\n Default value: 1\\\\n\\\\n --preserve-original-letter-case | --po (boolean)\\\\n\\\\n Specifies if the automatically generated directories and files use the original\\\\n letter case\\\\n\\\\n Default value: false\\\\n\\\\n --record | -r (boolean)\\\\n\\\\n Download the file content in record mode, which means that no data conversion is\\\\n performed and the record length is prepended to the data. The data transfer\\\\n process returns each line as-is, without translation. No delimiters are added\\\\n between records. Conflicts with binary.\\\\n\\\\n --volume-serial | --vs (string)\\\\n\\\\n The volume serial (VOLSER) where the data set resides. You can use this option\\\\n at any time. However, the VOLSER is required only when the data set is not\\\\n cataloged on the system. A VOLSER is analogous to a drive name on a PC.\\\\n\\\\n --response-timeout | --rto (number)\\\\n\\\\n The maximum amount of time in seconds the z/OSMF Files TSO servlet should run\\\\n before returning a response. Any request exceeding this amount of time will be\\\\n terminated and return an error. 
Allowed values: 5 - 600\\\\n\\\\n ZOSMF CONNECTION OPTIONS\\\\n ------------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The z/OSMF server host name.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The z/OSMF server port.\\\\n\\\\n Default value: 443\\\\n\\\\n --user | -u (string)\\\\n\\\\n Mainframe (z/OSMF) user name, which can be the same as your TSO login.\\\\n\\\\n --password | --pass | --pw (string)\\\\n\\\\n Mainframe (z/OSMF) password, which can be the same as your TSO password.\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates.\\\\n\\\\n Default value: true\\\\n\\\\n --base-path | --bp (string)\\\\n\\\\n The base path for your API mediation layer instance. Specify this option to\\\\n prepend the base path to all z/OSMF resources when making REST requests. Do not\\\\n specify this option if you are not using an API mediation layer.\\\\n\\\\n --protocol (string)\\\\n\\\\n The protocol used (HTTP or HTTPS)\\\\n\\\\n Default value: https\\\\n Allowed values: http, https\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zosmf-profile | --zosmf-p (string)\\\\n\\\\n The name of a (zosmf) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-examples (boolean)\\\\n\\\\n Display examples for all the commands in a group\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Download all data sets beginning with \\\\\\"ibmuser\\\\\\" and ending\\\\n with \\\\\\".cntl\\\\\\" or \\\\\\".jcl\\\\\\" to the local directory \\\\\\"jcl\\\\\\" to files with the extension\\\\n \\\\\\".jcl\\\\\\":\\\\n\\\\n $ zowe zos-files download data-set-matching \\\\\\"ibmuser.**.cntl, ibmuser.**.jcl\\\\\\" --directory jcl --extension .jcl\\\\n\\\\n - Download all data sets that begin with\\\\n \\\\\\"ibmuser.public.project\\\\\\" or \\\\\\"ibmuser.project.private\\\\\\", excluding those that end\\\\n in \\\\\\"lib\\\\\\" to the local directory \\\\\\"project\\\\\\", providing a custom mapping of data\\\\n set low level qualifier to local file extension:\\\\n\\\\n $ zowe zos-files download data-set-matching \\\\\\"ibmuser.public.project.*, ibmuser.project.private.*\\\\\\" --exclude-patterns \\\\\\"ibmuser.public.**.*lib\\\\\\" --directory project --extension-map cpgm=c,asmpgm=asm,java=java,chdr=c,jcl=jcl,cntl=jcl\\\\n\\\\n\\" +}" +`; diff --git a/packages/cli/__tests__/zosfiles/__integration__/download/dsm/cli.files.download.dsm.integration.test.ts 
b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/cli.files.download.dsm.integration.test.ts new file mode 100644 index 0000000000..03a0ff0b48 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__integration__/download/dsm/cli.files.download.dsm.integration.test.ts @@ -0,0 +1,67 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +import * as path from "path"; +import { ITestEnvironment, runCliScript } from "@zowe/cli-test-utils"; +import { TestEnvironment } from "../../../../../../../__tests__/__src__/environment/TestEnvironment"; +import { ITestPropertiesSchema } from "../../../../../../../__tests__/__src__/properties/ITestPropertiesSchema"; + +// Test Environment populated in the beforeAll(); +let TEST_ENVIRONMENT: ITestEnvironment; + +describe("Download Dataset Matching", () => { + + beforeAll(async () => { + TEST_ENVIRONMENT = await TestEnvironment.setUp({ + testName: "download_data_set_matching", + skipProperties: true + }); + }); + + afterAll(async () => { + await TestEnvironment.cleanUp(TEST_ENVIRONMENT); + }); + + it("should display the help", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm_help.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT); + + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toMatchSnapshot(); + }); + + it("should fail due to missing pattern", async () => { + const shellScript = path.join(__dirname, "__scripts__", "command", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [""]); + expect(response.status).toBe(1); + expect(response.stderr.toString()).toContain("pattern"); + expect(response.stderr.toString()).toContain("Missing Positional"); + }); + + it("should fail due to specifying both binary and record", async () => { + const shellScript = path.join(__dirname, "__scripts__", "command", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, ["test", "--binary", "--record"]); + expect(response.status).toBe(1); + expect(response.stderr.toString()).toContain("following options conflict"); + expect(response.stderr.toString()).toContain("--record"); + expect(response.stderr.toString()).toContain("--binary"); + }); + + it("should fail due to specifying both extension and extensionMap", async () => { + const shellScript = path.join(__dirname, "__scripts__", "command", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, ["test", "--extension", "--extension-map"]); + expect(response.status).toBe(1); + expect(response.stderr.toString()).toContain("following options conflict"); + expect(response.stderr.toString()).toContain("--extension"); + expect(response.stderr.toString()).toContain("--extension-map"); + }); +}); diff --git a/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm.sh b/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm.sh new file mode 100644 index 0000000000..45aa30b4a9 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +zowe zos-files download dsm $* 
+if [ $? -gt 0 ] +then + exit $? +fi diff --git a/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm_fully_qualified.sh b/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm_fully_qualified.sh new file mode 100644 index 0000000000..e451f8d362 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__system__/download/dsm/__scripts__/command_download_dsm_fully_qualified.sh @@ -0,0 +1,8 @@ +#!/bin/bash +HOST=$2 +PORT=$3 +USER=$4 +PASS=$5 + +zowe zos-files download dsm "$1" --host $HOST --port $PORT --user $USER --password $PASS --ru=false +exit $? \ No newline at end of file diff --git a/packages/cli/__tests__/zosfiles/__system__/download/dsm/cli.files.download.dsm.system.test.ts b/packages/cli/__tests__/zosfiles/__system__/download/dsm/cli.files.download.dsm.system.test.ts new file mode 100644 index 0000000000..ca1d9b381d --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__system__/download/dsm/cli.files.download.dsm.system.test.ts @@ -0,0 +1,297 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +import { Session } from "@zowe/imperative"; +import * as path from "path"; +import { ITestEnvironment, runCliScript } from "@zowe/cli-test-utils"; +import { TestEnvironment } from "../../../../../../../__tests__/__src__/environment/TestEnvironment"; +import { ITestPropertiesSchema } from "../../../../../../../__tests__/__src__/properties/ITestPropertiesSchema"; +import { getUniqueDatasetName } from "../../../../../../../__tests__/__src__/TestUtils"; +import { Create, CreateDataSetTypeEnum, Delete, Upload } from "@zowe/zos-files-for-zowe-sdk"; + +let REAL_SESSION: Session; +// Test Environment populated in the beforeAll(); +let TEST_ENVIRONMENT: ITestEnvironment; +let TEST_ENVIRONMENT_NO_PROF: ITestEnvironment; +let defaultSystem: ITestPropertiesSchema; +let pattern: string; +let dsnames: string[]; +const testString = "test"; + +describe("Download Dataset Matching", () => { + + beforeAll(async () => { + TEST_ENVIRONMENT = await TestEnvironment.setUp({ + tempProfileTypes: ["zosmf"], + testName: "download_data_set_matching" + }); + + defaultSystem = TEST_ENVIRONMENT.systemTestProperties; + + REAL_SESSION = TestEnvironment.createZosmfSession(TEST_ENVIRONMENT); + + const dsnPrefix = getUniqueDatasetName(defaultSystem.zosmf.user); + dsnames = [dsnPrefix, dsnPrefix+".T01", dsnPrefix+".T02", dsnPrefix+".T03"]; + pattern = dsnPrefix + "*"; + }); + + afterAll(async () => { + await TestEnvironment.cleanUp(TEST_ENVIRONMENT); + }); + describe("without profiles", () => { + let defaultSys: ITestPropertiesSchema; + + // Create the unique test environment + beforeAll(async () => { + TEST_ENVIRONMENT_NO_PROF = await TestEnvironment.setUp({ + testName: "zos_files_download_dsm_without_profile" + }); + + defaultSys = TEST_ENVIRONMENT_NO_PROF.systemTestProperties; + }); + + afterAll(async () => { + await TestEnvironment.cleanUp(TEST_ENVIRONMENT_NO_PROF); + }); + + beforeEach(async () => { + for (const dsn of dsnames) { + await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_PARTITIONED, dsn); + await Upload.bufferToDataSet(REAL_SESSION, Buffer.from(testString), `${dsn}(${testString})`); + } + }); + + afterEach(async () => { + for (const dsn of dsnames) { + await 
Delete.dataSet(REAL_SESSION, dsn); + } + }); + + it("should download matching datasets", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm_fully_qualified.sh"); + + const ZOWE_OPT_BASE_PATH = "ZOWE_OPT_BASE_PATH"; + + // if API Mediation layer is being used (basePath has a value) then + // set an ENVIRONMENT variable to be used by zowe. + if (defaultSys.zosmf.basePath != null) { + TEST_ENVIRONMENT_NO_PROF.env[ZOWE_OPT_BASE_PATH] = defaultSys.zosmf.basePath; + } + + const response = runCliScript(shellScript, + TEST_ENVIRONMENT_NO_PROF, + [pattern, + defaultSys.zosmf.host, + defaultSys.zosmf.port, + defaultSys.zosmf.user, + defaultSys.zosmf.password]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + }); + + describe("Success scenarios - PDS", () => { + beforeEach(async () => { + for (const dsn of dsnames) { + await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_PARTITIONED, dsn); + await Upload.bufferToDataSet(REAL_SESSION, Buffer.from(testString), `${dsn}(${testString})`); + } + }); + + afterEach(async () => { + for (const dsn of dsnames) { + await Delete.dataSet(REAL_SESSION, dsn); + } + }); + + it("should download data sets matching a given pattern", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern in binary format", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--binary"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern in record format", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--record"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with response timeout", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--responseTimeout 5"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching 
pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with --max-concurrent-requests 2", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--max-concurrent-requests", 2]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with response-format-json flag", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--rfj"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern to specified directory", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const testDir = "test/folder"; + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "-d", testDir]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ${testDir}`); + }); + + it("should download data sets matching a given pattern with extension = \".jcl\"", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const testDir = "test/folder"; + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--rfj", "-d", testDir, "-e", ".jcl"]); + + const result = JSON.parse(response.stdout.toString()); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(result.stdout).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(result.stdout).toContain(`${dsnames.length} data set(s) downloaded successfully to ${testDir}`); + + for (const apiResp of result.data.apiResponse) { + expect(apiResp.status).toContain("Data set downloaded successfully."); + expect(apiResp.status).toContain("Destination:"); + expect(apiResp.status).toContain(testDir); + expect(apiResp.status).toContain("Members: TEST;"); + } + }); + }); + + describe("Success scenarios - PS", () => { + beforeEach(async () => { + for (const dsn of dsnames) { + await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_SEQUENTIAL, dsn); + await Upload.bufferToDataSet(REAL_SESSION, Buffer.from(testString), `${dsn}`); + } + }); + + afterEach(async () => { + for (const dsn of dsnames) { + await Delete.dataSet(REAL_SESSION, dsn); + } + }); + + it("should download data sets matching a given pattern", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern]); + 
expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern in binary format", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--binary"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern in record format", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--record"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with response timeout", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--responseTimeout 5"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with --max-concurrent-requests 2", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--max-concurrent-requests", 2]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern with response-format-json flag", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--rfj"]); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ./`); + }); + + it("should download data sets matching a given pattern to specified directory", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const testDir = "test/folder"; + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "-d", testDir]); + expect(response.stderr.toString()).toBe(""); + 
expect(response.status).toBe(0); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(response.stdout.toString()).toContain(`${dsnames.length} data set(s) downloaded successfully to ${testDir}`); + }); + + it("should download data sets matching a given pattern with extension = \".jcl\"", () => { + const shellScript = path.join(__dirname, "__scripts__", "command_download_dsm.sh"); + const testDir = "test/folder"; + const response = runCliScript(shellScript, TEST_ENVIRONMENT, [pattern, "--rfj", "-d", testDir, "-e", ".jcl"]); + + const result = JSON.parse(response.stdout.toString()); + expect(response.stderr.toString()).toBe(""); + expect(response.status).toBe(0); + expect(result.stdout).toContain(`${dsnames.length} data set(s) were found matching pattern`); + expect(result.stdout).toContain(`${dsnames.length} data set(s) downloaded successfully to ${testDir}`); + + for (const apiResp of result.data.apiResponse) { + expect(apiResp.status).toContain("Data set downloaded successfully."); + expect(apiResp.status).toContain("Destination:"); + expect(apiResp.status).toContain(testDir); + } + }); + }); +}); diff --git a/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.definition.unit.test.ts b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.definition.unit.test.ts new file mode 100644 index 0000000000..6585eda1a6 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.definition.unit.test.ts @@ -0,0 +1,37 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +import { ICommandDefinition } from "@zowe/imperative"; + +describe("zos-files download dsm command definition", () => { + it ("should not have changed", () => { + const definition: ICommandDefinition = + require("../../../../../src/zosfiles/download/dsm/DataSetMatching.definition").DataSetMatchingDefinition; + + expect(definition).toBeDefined(); + + // Should not contain children since this is a command + expect(definition.children).toBeUndefined(); + + // Should require a zosmf profile + expect(definition.profile.optional).toEqual(["zosmf"]); + + // Should only contain one positional + expect(definition.positionals.length).toEqual(1); + + // The positional should be required + expect(definition.positionals[0].required).toBeTruthy(); + + // Should not change + expect(definition.options).toMatchSnapshot(); + expect(definition.examples).toMatchSnapshot(); + }); +}); diff --git a/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.handler.unit.test.ts b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.handler.unit.test.ts new file mode 100644 index 0000000000..594b73562d --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/DataSetMatching.handler.unit.test.ts @@ -0,0 +1,209 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. 
+* +*/ + +import { IHandlerParameters, Session } from "@zowe/imperative"; +import { Download, IDownloadOptions, IDsmListOptions, List } from "@zowe/zos-files-for-zowe-sdk"; +import * as DataSetMatchingDefinition from "../../../../../src/zosfiles/download/dsm/DataSetMatching.definition"; +import * as DataSetMatchingHandler from "../../../../../src/zosfiles/download/dsm/DataSetMatching.handler"; +import { UNIT_TEST_ZOSMF_PROF_OPTS, UNIT_TEST_PROFILES_ZOSMF } from "../../../../../../../__tests__/__src__/mocks/ZosmfProfileMock"; +import { mockHandlerParameters } from "@zowe/cli-test-utils"; + +const DEFAULT_PARAMETERS: IHandlerParameters = mockHandlerParameters({ + arguments: UNIT_TEST_ZOSMF_PROF_OPTS, + positionals: ["zos-jobs", "download", "output"], + definition: DataSetMatchingDefinition.DataSetMatchingDefinition, + profiles: UNIT_TEST_PROFILES_ZOSMF +}); + +const fakeListOptions: IDsmListOptions = { + task: { + percentComplete: 0, + stageName: 0, + statusMessage: "Searching for data sets" + } +}; + +const fakeDownloadOptions: IDownloadOptions = { + binary: undefined, + directory: undefined, + encoding: undefined, + excludePatterns: undefined, + extension: undefined, + extensionMap: undefined, + failFast: undefined, + maxConcurrentRequests: undefined, + preserveOriginalLetterCase: undefined, + record: undefined, + responseTimeout: undefined, + volume: undefined, + task: { + percentComplete: 0, + stageName: 0, + statusMessage: "Downloading data sets" + } +}; + +describe("Download DataSetMatching handler", () => { + it("should download matching datasets if requested", async () => { + const pattern = "testing"; + const fakeListResponse = [{ dsname: "HLQ." + pattern }]; + let passedSession: Session = null; + List.dataSetsMatchingPattern = jest.fn((session) => { + passedSession = session; + return { + success: true, + commandResponse: "listed", + apiResponse: fakeListResponse + }; + }); + Download.allDataSets = jest.fn((session) => { + return { + success: true, + commandResponse: "downloaded" + }; + }); + + const handler = new DataSetMatchingHandler.default(); + const params = Object.assign({}, ...[DEFAULT_PARAMETERS]); + params.arguments = Object.assign({}, ...[DEFAULT_PARAMETERS.arguments]); + params.arguments.pattern = pattern; + await handler.process(params); + + expect(List.dataSetsMatchingPattern).toHaveBeenCalledTimes(1); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledWith(passedSession, [pattern], { ...fakeListOptions }); + expect(Download.allDataSets).toHaveBeenCalledTimes(1); + expect(Download.allDataSets).toHaveBeenCalledWith(passedSession, fakeListResponse, { ...fakeDownloadOptions }); + }); + + it("should handle generation of an extension map", async () => { + const pattern = "testing"; + const fakeListResponse = [{ dsname: "HLQ." 
+ pattern }]; + const extensionMap = "CNTL=JCL,PARMLIB=JCL,LOADLIB=JCL"; + let passedSession: Session = null; + List.dataSetsMatchingPattern = jest.fn((session) => { + passedSession = session; + return { + success: true, + commandResponse: "listed", + apiResponse: fakeListResponse + }; + }); + Download.allDataSets = jest.fn((session) => { + return { + success: true, + commandResponse: "downloaded" + }; + }); + + const handler = new DataSetMatchingHandler.default(); + const params = Object.assign({}, ...[DEFAULT_PARAMETERS]); + params.arguments = Object.assign({}, ...[DEFAULT_PARAMETERS.arguments]); + params.arguments.pattern = pattern; + params.arguments.extensionMap = extensionMap; + await handler.process(params); + + expect(List.dataSetsMatchingPattern).toHaveBeenCalledTimes(1); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledWith(passedSession, [pattern], { ...fakeListOptions }); + expect(Download.allDataSets).toHaveBeenCalledTimes(1); + expect(Download.allDataSets).toHaveBeenCalledWith(passedSession, fakeListResponse, { + ...fakeDownloadOptions, + extensionMap: { cntl: "jcl", parmlib: "jcl", loadlib: "jcl" } + }); + }); + + it("should gracefully handle an extension map parsing error", async () => { + const pattern = "testing"; + const extensionMap = "CNTL=JCL,PARMLIB-JCL,LOADLIB=JCL"; + let caughtError; + List.dataSetsMatchingPattern = jest.fn(); + Download.allDataSets = jest.fn(); + + const handler = new DataSetMatchingHandler.default(); + const params = Object.assign({}, ...[DEFAULT_PARAMETERS]); + params.arguments = Object.assign({}, ...[DEFAULT_PARAMETERS.arguments]); + params.arguments.pattern = pattern; + params.arguments.extensionMap = extensionMap; + try { + await handler.process(params); + } catch (error) { + caughtError = error; + } + + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain("An error occurred processing the extension map"); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledTimes(0); + expect(Download.allDataSets).toHaveBeenCalledTimes(0); + }); + + it("should handle generation of an exclusion list", async () => { + const pattern = "testing"; + const fakeListResponse = [{ dsname: "HLQ." 
+ pattern }]; + const excludePatterns = "TEST.EXCLUDE.**.CNTL"; + let passedSession: Session = null; + List.dataSetsMatchingPattern = jest.fn((session) => { + passedSession = session; + return { + success: true, + commandResponse: "listed", + apiResponse: fakeListResponse + }; + }); + Download.allDataSets = jest.fn((session) => { + return { + success: true, + commandResponse: "downloaded" + }; + }); + + const handler = new DataSetMatchingHandler.default(); + const params = Object.assign({}, ...[DEFAULT_PARAMETERS]); + params.arguments = Object.assign({}, ...[DEFAULT_PARAMETERS.arguments]); + params.arguments.pattern = pattern; + params.arguments.excludePatterns = excludePatterns; + await handler.process(params); + + expect(List.dataSetsMatchingPattern).toHaveBeenCalledTimes(1); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledWith(passedSession, [pattern], { + ...fakeListOptions, + excludePatterns: [excludePatterns] + }); + expect(Download.allDataSets).toHaveBeenCalledTimes(1); + expect(Download.allDataSets).toHaveBeenCalledWith(passedSession, fakeListResponse, { ...fakeDownloadOptions }); + }); + + it("should gracefully handle an error from the z/OSMF List API", async () => { + const errorMsg = "i haz bad data set"; + const pattern = "testing"; + let caughtError; + let passedSession: Session = null; + List.dataSetsMatchingPattern = jest.fn((session) => { + passedSession = session; + throw new Error(errorMsg); + }); + Download.allDataSets = jest.fn(); + + const handler = new DataSetMatchingHandler.default(); + const params = Object.assign({}, ...[DEFAULT_PARAMETERS]); + params.arguments = Object.assign({}, ...[DEFAULT_PARAMETERS.arguments]); + params.arguments.pattern = pattern; + try { + await handler.process(params); + } catch (error) { + caughtError = error; + } + + expect(caughtError).toBeDefined(); + expect(caughtError.message).toBe(errorMsg); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledTimes(1); + expect(List.dataSetsMatchingPattern).toHaveBeenCalledWith(passedSession, [pattern], { ...fakeListOptions }); + expect(Download.allDataSets).toHaveBeenCalledTimes(0); + }); +}); diff --git a/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.definition.unit.test.ts.snap b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.definition.unit.test.ts.snap new file mode 100644 index 0000000000..53567f7df1 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.definition.unit.test.ts.snap @@ -0,0 +1,124 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`zos-files download dsm command definition should not have changed 1`] = ` +Array [ + Object { + "aliases": Array [ + "b", + ], + "description": "Download the file content in binary mode, which means that no data conversion is performed. The data transfer process returns each line as-is, without translation. No delimiters are added between records.", + "name": "binary", + "type": "boolean", + }, + Object { + "aliases": Array [ + "d", + ], + "description": "The directory to where you want to save the members. The command creates the directory for you when it does not already exist. By default, the command creates a folder structure based on the data set qualifiers. 
For example, the data set ibmuser.new.cntl's members are downloaded to ibmuser/new/cntl).", + "name": "directory", + "type": "string", + }, + Object { + "aliases": Array [ + "ec", + ], + "description": "Download the file content with encoding mode, which means that data conversion is performed using the file encoding specified.", + "name": "encoding", + "type": "string", + }, + Object { + "aliases": Array [ + "ep", + ], + "description": "Exclude data sets that match these DSLEVEL patterns. Any data sets that match this pattern will not be downloaded.", + "name": "exclude-patterns", + "type": "string", + }, + Object { + "aliases": Array [ + "e", + ], + "description": "Save the local files with a specified file extension. For example, .txt. Or \\"\\" for no extension. When no extension is specified, .txt is used as the default file extension.", + "name": "extension", + "type": "stringOrEmpty", + }, + Object { + "aliases": Array [ + "em", + ], + "conflictsWith": Array [ + "extension", + ], + "description": "Use this option to map data set names that match your pattern to the desired + extension. A comma delimited key value pairing (e.g. \\"cntl=.jcl,cpgm=.c\\" to map + the last segment of each data set (also known as the \\"low level qualifier\\" to + the desired local file extension).", + "name": "extension-map", + "type": "string", + }, + Object { + "aliases": Array [ + "ff", + ], + "defaultValue": true, + "description": "Set this option to false to continue downloading dataset members if one or more fail.", + "name": "fail-fast", + "type": "boolean", + }, + Object { + "aliases": Array [ + "mcr", + ], + "defaultValue": 1, + "description": "Specifies the maximum number of concurrent z/OSMF REST API requests to download members. Increasing the value results in faster downloads. However, increasing the value increases resource consumption on z/OS and can be prone to errors caused by making too many concurrent requests. If the download process encounters an error, the following message displays: +The maximum number of TSO address spaces have been created. When you specify 0, Zowe CLI attempts to download all members at once without a maximum number of concurrent requests. ", + "name": "max-concurrent-requests", + "numericValueRange": Array [ + 0, + 99999, + ], + "type": "number", + }, + Object { + "aliases": Array [ + "po", + ], + "defaultValue": false, + "description": "Specifies if the automatically generated directories and files use the original letter case", + "name": "preserve-original-letter-case", + "type": "boolean", + }, + Object { + "aliases": Array [ + "r", + ], + "conflictsWith": Array [ + "binary", + ], + "description": "Download the file content in record mode, which means that no data conversion is performed and the record length is prepended to the data. The data transfer process returns each line as-is, without translation. No delimiters are added between records. Conflicts with binary.", + "name": "record", + "type": "boolean", + }, + Object { + "aliases": Array [ + "vs", + ], + "description": "The volume serial (VOLSER) where the data set resides. You can use this option at any time. However, the VOLSER is required only when the data set is not cataloged on the system. 
A VOLSER is analogous to a drive name on a PC.", + "name": "volume-serial", + "type": "string", + }, +] +`; + +exports[`zos-files download dsm command definition should not have changed 2`] = ` +Array [ + Object { + "description": "Download all data sets beginning with \\"ibmuser\\" and ending with \\".cntl\\" or \\".jcl\\" to the local directory \\"jcl\\" to files with the extension \\".jcl\\"", + "options": "\\"ibmuser.**.cntl, ibmuser.**.jcl\\" --directory jcl --extension .jcl", + }, + Object { + "description": "Download all data sets that begin with \\"ibmuser.public.project\\" or \\"ibmuser.project.private\\", excluding those that end in \\"lib\\" to the local directory \\"project\\", providing a custom mapping of data set low level qualifier to local file extension", + "options": "\\"ibmuser.public.project.*, ibmuser.project.private.*\\" --exclude-patterns \\"ibmuser.public.**.*lib\\" --directory project --extension-map cpgm=c,asmpgm=asm,java=java,chdr=c,jcl=jcl,cntl=jcl", + }, +] +`; diff --git a/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.handler.unit.test.ts.snap b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.handler.unit.test.ts.snap new file mode 100644 index 0000000000..6559437fc5 --- /dev/null +++ b/packages/cli/__tests__/zosfiles/__unit__/download/dsm/__snapshots__/DataSetMatching.handler.unit.test.ts.snap @@ -0,0 +1,46 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Download DataSetMatching handler should download matching datasets if requested 1`] = ` +" +listed +" +`; + +exports[`Download DataSetMatching handler should download matching datasets if requested 2`] = `"downloaded"`; + +exports[`Download DataSetMatching handler should download matching datasets if requested 3`] = ` +Object { + "commandResponse": "downloaded", + "success": true, +} +`; + +exports[`Download DataSetMatching handler should handle generation of an exclusion list 1`] = ` +" +listed +" +`; + +exports[`Download DataSetMatching handler should handle generation of an exclusion list 2`] = `"downloaded"`; + +exports[`Download DataSetMatching handler should handle generation of an exclusion list 3`] = ` +Object { + "commandResponse": "downloaded", + "success": true, +} +`; + +exports[`Download DataSetMatching handler should handle generation of an extension map 1`] = ` +" +listed +" +`; + +exports[`Download DataSetMatching handler should handle generation of an extension map 2`] = `"downloaded"`; + +exports[`Download DataSetMatching handler should handle generation of an extension map 3`] = ` +Object { + "commandResponse": "downloaded", + "success": true, +} +`; diff --git a/packages/cli/package.json b/packages/cli/package.json index 3077c78811..bd50dc366f 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -58,7 +58,7 @@ }, "dependencies": { "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/perf-timing": "1.0.7", "@zowe/provisioning-for-zowe-sdk": "7.2.5", "@zowe/zos-console-for-zowe-sdk": "7.2.5", diff --git a/packages/cli/src/zosfiles/-strings-/en.ts b/packages/cli/src/zosfiles/-strings-/en.ts index 237c0ff029..8384de3c80 100644 --- a/packages/cli/src/zosfiles/-strings-/en.ts +++ b/packages/cli/src/zosfiles/-strings-/en.ts @@ -318,27 +318,23 @@ export default { DESCRIPTION: "Download all data sets that match a DSLEVEL pattern (see help below). 
" + "You can use several options to qualify which data sets will be skipped and how the " + "downloaded files will be structured. Data sets that are neither physical sequential nor " + - "partitioned data sets (with members) will be excluded. ", + "partitioned data sets (with members) will be excluded.", POSITIONALS: { - PATTERN: `The pattern or patterns to match data sets against. Also known as 'DSLEVEL'. The - following special sequences can be used in the pattern: + PATTERN: `The pattern or patterns to match data sets against. Also known as 'DSLEVEL'. The following special sequences can be ` + + `used in the pattern: ${TextUtils.chalk.yellow("%")}: matches any single character - ${TextUtils.chalk.yellow("*")}: matches any number of characters within a data set name qualifier (e.g. - "ibmuser.j*.old" matches "ibmuser.jcl.old" but not "ibmuser.jcl.very.old") - ${TextUtils.chalk.yellow("**")}: matches any number of characters within any number of data set name - qualifiers (e.g. "ibmuser.**.old" matches both "ibmuser.jcl.old" and - "ibmuser.jcl.very.old") - However, the pattern cannot begin with any of these sequences.You can specify - multiple patterns separated by commas, for example - "ibmuser.**.cntl,ibmuser.**.jcl"` - }, - EXAMPLES: { - EX1: `Download all data sets beginning with "ibmuser" and ending with - ".cntl" or ".jcl" to the local directory "jcl" to files with the extension ".jcl"`, - EX2: `Download all data sets that begin with "ibmuser.public.project" or - "ibmuser.project.private", excluding those that end in "lib" to the local - directory "project", providing a custom mapping of data set low level qualifier - to local file extension` + ${TextUtils.chalk.yellow("*")}: matches any number of characters within a data set name qualifier ` + + `(e.g. "ibmuser.j*.old" matches "ibmuser.jcl.old" but not "ibmuser.jcl.very.old") + ${TextUtils.chalk.yellow("**")}: matches any number of characters within any number of data set name qualifiers ` + + `(e.g. "ibmuser.**.old" matches both "ibmuser.jcl.old" and "ibmuser.jcl.very.old") + However, the pattern cannot begin with any of these sequences.You can specify multiple patterns separated by commas, ` + + `for example "ibmuser.**.cntl,ibmuser.**.jcl"` + }, + EXAMPLES: { + EX1: `Download all data sets beginning with "ibmuser" and ending with ".cntl" or ".jcl" to the local directory "jcl" to ` + + `files with the extension ".jcl"`, + EX2: `Download all data sets that begin with "ibmuser.public.project" or "ibmuser.project.private", excluding those that end ` + + `in "lib" to the local directory "project", providing a custom mapping of data set low level qualifier to local file extension` } } }, diff --git a/packages/cli/src/zosfiles/download/Download.definition.ts b/packages/cli/src/zosfiles/download/Download.definition.ts index 41510b3ac7..484d33199d 100644 --- a/packages/cli/src/zosfiles/download/Download.definition.ts +++ b/packages/cli/src/zosfiles/download/Download.definition.ts @@ -15,6 +15,7 @@ import { AllMembersDefinition } from "./am/AllMembers.definition"; import i18nTypings from "../-strings-/en"; import { UssFileDefinition } from "./uss/UssFile.definition"; +import { DataSetMatchingDefinition } from "./dsm/DataSetMatching.definition"; // Does not use the import in anticipation of some internationalization work to be done later. 
const strings = (require("../-strings-/en").default as typeof i18nTypings).DOWNLOAD; @@ -32,6 +33,7 @@ export const DownloadDefinition: ICommandDefinition = { children: [ DatasetDefinition, AllMembersDefinition, - UssFileDefinition + UssFileDefinition, + DataSetMatchingDefinition ] }; diff --git a/packages/cli/src/zosfiles/download/Download.options.ts b/packages/cli/src/zosfiles/download/Download.options.ts index 7b5e2b1575..dfd29034d3 100644 --- a/packages/cli/src/zosfiles/download/Download.options.ts +++ b/packages/cli/src/zosfiles/download/Download.options.ts @@ -104,7 +104,7 @@ export const DownloadOptions: { [key: string]: ICommandOptionDefinition } = { * The pattern to be excluded * @type {ICommandOptionDefinition} */ - excludePattern: { + excludePattern: { // for consistency, we should "break" this and make it plural :P name: "exclude-patterns", aliases: ["ep"], description: strings.EXCLUDE_PATTERN, diff --git a/packages/cli/src/zosfiles/download/dsm/DataSetMatching.definition.ts b/packages/cli/src/zosfiles/download/dsm/DataSetMatching.definition.ts new file mode 100644 index 0000000000..33df28658b --- /dev/null +++ b/packages/cli/src/zosfiles/download/dsm/DataSetMatching.definition.ts @@ -0,0 +1,65 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +import { ICommandDefinition } from "@zowe/imperative"; +import { DownloadOptions } from "../Download.options"; +import i18nTypings from "../../-strings-/en"; + +// Does not use the import in anticipation of some internationalization work to be done later. 
+const strings = (require("../../-strings-/en").default as typeof i18nTypings).DOWNLOAD.ACTIONS.DATA_SETS_MATCHING; + +/** + * Download data sets matching command definition containing its description, examples and/or options + * @type {ICommandDefinition} + */ +export const DataSetMatchingDefinition: ICommandDefinition = { + name: "data-set-matching", + aliases: ["dsm"], + summary: strings.SUMMARY, + description: strings.DESCRIPTION, + type: "command", + handler: __dirname + "/DataSetMatching.handler", + profile: { + optional: ["zosmf"] + }, + positionals: [ + { + name: "pattern", + description: strings.POSITIONALS.PATTERN, + type: "string", + required: true + } + ], + options: [ + DownloadOptions.volume, + DownloadOptions.directory, + DownloadOptions.binary, + DownloadOptions.record, + DownloadOptions.encoding, + DownloadOptions.extension, + DownloadOptions.excludePattern, + DownloadOptions.extensionMap, + DownloadOptions.maxConcurrentRequests, + DownloadOptions.preserveOriginalLetterCase, + DownloadOptions.failFast + ].sort((a, b) => a.name.localeCompare(b.name)), + examples: [ + { + description: strings.EXAMPLES.EX1, + options: `"ibmuser.**.cntl, ibmuser.**.jcl" --directory jcl --extension .jcl` + }, + { + description: strings.EXAMPLES.EX2, + options: `"ibmuser.public.project.*, ibmuser.project.private.*" --exclude-patterns "ibmuser.public.**.*lib" ` + + `--directory project --extension-map cpgm=c,asmpgm=asm,java=java,chdr=c,jcl=jcl,cntl=jcl` + } + ] +}; diff --git a/packages/cli/src/zosfiles/download/dsm/DataSetMatching.handler.ts b/packages/cli/src/zosfiles/download/dsm/DataSetMatching.handler.ts new file mode 100644 index 0000000000..8bc145cd3d --- /dev/null +++ b/packages/cli/src/zosfiles/download/dsm/DataSetMatching.handler.ts @@ -0,0 +1,81 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project.
+* +*/ + +import { AbstractSession, IHandlerParameters, ImperativeError, ImperativeExpect, ITaskWithStatus, TaskStage } from "@zowe/imperative"; +import { IZosFilesResponse, Download, IDownloadOptions, IDsmListOptions, List } from "@zowe/zos-files-for-zowe-sdk"; +import { ZosFilesBaseHandler } from "../../ZosFilesBase.handler"; + +/** + * Handler to download data sets matching a DSLEVEL pattern + * @export + */ +export default class DataSetMatchingHandler extends ZosFilesBaseHandler { + public async processWithSession(commandParameters: IHandlerParameters, session: AbstractSession): Promise<IZosFilesResponse> { + const extensionMap: {[key: string]: string} = {}; + try { + if (commandParameters.arguments.extensionMap) { + commandParameters.arguments.extensionMap = commandParameters.arguments.extensionMap.toLowerCase(); + const unoptimizedMap = commandParameters.arguments.extensionMap.split(","); + for (const entry of unoptimizedMap) { + const splitEntry = entry.split("="); + ImperativeExpect.toBeEqual(splitEntry.length, 2); + extensionMap[splitEntry[0]] = splitEntry[1]; + } + } + } catch (err) { + throw new ImperativeError({msg: "An error occurred processing the extension map.", causeErrors: err}); + } + + const listStatus: ITaskWithStatus = { + statusMessage: "Searching for data sets", + percentComplete: 0, + stageName: TaskStage.IN_PROGRESS + }; + const listOptions: IDsmListOptions = { + excludePatterns: commandParameters.arguments.excludePatterns?.split(","), + maxConcurrentRequests: commandParameters.arguments.maxConcurrentRequests, + task: listStatus, + responseTimeout: commandParameters.arguments.responseTimeout + }; + + commandParameters.response.progress.startBar({ task: listStatus }); + const response = await List.dataSetsMatchingPattern(session, commandParameters.arguments.pattern.split(","), listOptions); + commandParameters.response.progress.endBar(); + if (response.success) { + commandParameters.response.console.log(`\r${response.commandResponse}\n`); + } else { + return response; + } + + const downloadStatus: ITaskWithStatus = { + statusMessage: "Downloading data sets", + percentComplete: 0, + stageName: TaskStage.IN_PROGRESS + }; + const downloadOptions: IDownloadOptions = { + volume: commandParameters.arguments.volumeSerial, + binary: commandParameters.arguments.binary, + record: commandParameters.arguments.record, + encoding: commandParameters.arguments.encoding, + directory: commandParameters.arguments.directory, + extension: commandParameters.arguments.extension, + extensionMap: commandParameters.arguments.extensionMap ?
extensionMap : undefined, + maxConcurrentRequests: commandParameters.arguments.maxConcurrentRequests, + preserveOriginalLetterCase: commandParameters.arguments.preserveOriginalLetterCase, + failFast: commandParameters.arguments.failFast, + task: downloadStatus, + responseTimeout: commandParameters.arguments.responseTimeout + }; + + commandParameters.response.progress.startBar({ task: downloadStatus }); + return Download.allDataSets(session, response.apiResponse, downloadOptions); + } +} diff --git a/packages/core/package.json b/packages/core/package.json index e85a6bf190..833027dd39 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -51,7 +51,7 @@ "devDependencies": { "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "chalk": "^4.1.0", "eslint": "^7.32.0", "madge": "^4.0.1", diff --git a/packages/provisioning/package.json b/packages/provisioning/package.json index b21444130d..5581bf015a 100644 --- a/packages/provisioning/package.json +++ b/packages/provisioning/package.json @@ -52,7 +52,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/workflows/package.json b/packages/workflows/package.json index e523a84feb..a241703dad 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -51,7 +51,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zosconsole/package.json b/packages/zosconsole/package.json index 30720989b7..5c04612aea 100644 --- a/packages/zosconsole/package.json +++ b/packages/zosconsole/package.json @@ -48,7 +48,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zosfiles/CHANGELOG.md b/packages/zosfiles/CHANGELOG.md index 9c261b9a69..bdc4f4e523 100644 --- a/packages/zosfiles/CHANGELOG.md +++ b/packages/zosfiles/CHANGELOG.md @@ -2,6 +2,10 @@ All notable changes to the Zowe z/OS files SDK package will be documented in this file. +## Recent Changes + +- Enhancement: Added `Download.dataSetsMatchingPattern` method to download all data sets that match a DSLEVEL pattern. + ## `7.0.0` - Major: Introduced Team Profiles, Daemon mode, and more. See the prerelease items (if any) below for more details. 
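For readers skimming this changeset, the new handler above boils down to a two-step SDK flow: List.dataSetsMatchingPattern resolves the DSLEVEL pattern(s) into concrete data set entries, and Download.allDataSets then fetches those entries. The stand-alone TypeScript sketch below illustrates that flow under assumed values; the host name, credentials, patterns, and target directory are placeholders, not anything taken from this changeset.

import { Session } from "@zowe/imperative";
import { Download, IZosFilesResponse, List } from "@zowe/zos-files-for-zowe-sdk";

// Placeholder connection details; substitute a real z/OSMF host and credentials.
const session = new Session({
    hostname: "zosmf.example.com",
    port: 443,
    user: "ibmuser",
    password: "secret",
    type: "basic",
    rejectUnauthorized: false
});

(async (): Promise<void> => {
    // Step 1: resolve the DSLEVEL pattern(s) to data set entries,
    // optionally excluding some (equivalent to --exclude-patterns).
    const listResponse: IZosFilesResponse = await List.dataSetsMatchingPattern(
        session, ["ibmuser.**.cntl", "ibmuser.**.jcl"],
        { excludePatterns: ["ibmuser.public.**.*lib"] });

    // Step 2: download every entry returned by the list call.
    const downloadResponse: IZosFilesResponse = await Download.allDataSets(
        session, listResponse.apiResponse,
        { directory: "jcl", extension: ".jcl", failFast: false });

    console.log(downloadResponse.commandResponse);
})();

At the CLI level, the same flow corresponds to the first example in the new command definition: zowe zos-files download data-set-matching "ibmuser.**.cntl, ibmuser.**.jcl" --directory jcl --extension .jcl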
diff --git a/packages/zosfiles/__tests__/__system__/methods/download/Download.system.test.ts b/packages/zosfiles/__tests__/__system__/methods/download/Download.system.test.ts index cc150a0f6d..e2f1b0612b 100644 --- a/packages/zosfiles/__tests__/__system__/methods/download/Download.system.test.ts +++ b/packages/zosfiles/__tests__/__system__/methods/download/Download.system.test.ts @@ -93,7 +93,8 @@ describe("Download Data Set", () => { // delete the top-level folder and the folders and file below // variable 'file' should be set in the test const folders = file.split("/"); - const rc = rimraf(folders[0]); + let rc = rimraf(folders[0]); + rc = rimraf(file); }); it("should download a data set", async () => { @@ -563,7 +564,7 @@ describe("Download Data Set", () => { file = dsname.replace(regex, "/") + "/member.dat"; }); }); - describe("Data sets matching - all data sets", () => { + describe("Data sets matching - all data sets - PO", () => { beforeEach(async () => { let error; @@ -577,6 +578,200 @@ describe("Download Data Set", () => { } }); + afterEach(async () => { + let error; + let response; + + try { + response = await Delete.dataSet(REAL_SESSION, dsname); + await delay(delayTime); + } catch (err) { + error = err; + } + + // delete the top-level folder and the folders and file below + try { + const folders = file.split("/"); + const rc = rimraf(folders[0]); + } catch { + // Do nothing, sometimes the files are not created. + } + }); + + it("should download a data set", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname + "(member)"; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PO", vol: "*" }]); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file + const regex = /\./gi; + file = dsname.toLowerCase().replace(regex, "/"); + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}/member.txt`).toString()); + expect(fileContents).toEqual(data); + }); + + it("should download a data set in binary mode", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname + "(member)"; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + const options: IDownloadOptions = { + binary: true, + extension: ".txt", + directory: "testDir" + }; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PO", vol: "*" }], options); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + 
expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file for clean up in AfterEach + const regex = /\./gi; + file = "testDir/" + dsname.replace(regex, "/") + "/member.txt"; + }); + + it("should download a data set in record mode", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname + "(member)"; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + const options: IDownloadOptions = { + record: true, + extension: ".txt", + directory: "testDir" + }; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PO", vol: "*" }], options); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file for clean up in AfterEach + const regex = /\./gi; + file = "testDir/" + dsname.replace(regex, "/") + "/member.txt"; + }); + + it("should download a data set with a different extension", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname + "(member)"; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PO", vol: "*" }], {extension: "jcl"}); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file + const regex = /\./gi; + file = dsname.toLowerCase().replace(regex, "/"); + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}/member.jcl`).toString()); + expect(fileContents).toEqual(data); + }); + + it("should download a data set with an extension map", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname + "(member)"; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + const ending = dsname.split(".").pop().toLowerCase(); + const extMap: any = {}; + extMap[ending] = "jcl"; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PO", vol: "*" }], {extensionMap: extMap}); + Imperative.console.info("Response: " + 
inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file + const regex = /\./gi; + file = dsname.toLowerCase().replace(regex, "/"); + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}/member.jcl`).toString()); + expect(fileContents).toEqual(data); + }); + }); + + describe("Data sets matching - all data sets - PS", () => { + + beforeEach(async () => { + let error; + let response; + + try { + response = await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_SEQUENTIAL, dsname); + await delay(delayTime); + } catch (err) { + error = err; + } + }); + afterEach(async () => { let error; let response; @@ -590,7 +785,159 @@ describe("Download Data Set", () => { // delete the top-level folder and the folders and file below const folders = file.split("/"); - const rc = rimraf(folders[0]); + let rc = rimraf(folders[0]); + rc = rimraf(file); + }); + + it("should download a data set", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PS", vol: "*" }]); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + // convert the data set name to use as a path/file + const regex = /\./gi; + file = dsname.toLowerCase() + ".txt"; + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}`).toString()); + expect(fileContents).toEqual(data); + }); + + it("should download a data set in binary mode", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + const options: IDownloadOptions = { + binary: true, + extension: ".txt", + directory: "testDir" + }; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PS", vol: "*" }], options); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + file = "testdir/" + dsname.toLowerCase() + ".txt"; + }); + + it("should download a data set in record mode", async () => { + let error; + 
let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + const options: IDownloadOptions = { + record: true, + extension: ".txt", + directory: "testDir" + }; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PS", vol: "*" }], options); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + file = "testdir/" + dsname.toLowerCase() + ".txt"; + }); + + it("should download a data set with a different extension", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PS", vol: "*" }], {extension: "jcl"}); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + file = dsname.toLowerCase() + ".jcl"; + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}`).toString()); + expect(fileContents).toEqual(data); + }); + + it("should download a data set with an extension map", async () => { + let error; + let response: IZosFilesResponse; + + // TODO - convert to UPLOAD APIs when available + // upload data to the newly created data set + const data: string = "abcdefghijklmnopqrstuvwxyz"; + const endpoint: string = ZosFilesConstants.RESOURCE + ZosFilesConstants.RES_DS_FILES + "/" + dsname; + const rc = await ZosmfRestClient.putExpectString(REAL_SESSION, endpoint, [], data); + const ending = dsname.split(".").pop().toLowerCase(); + const extMap: any = {}; + extMap[ending] = "jcl"; + + try { + response = await Download.allDataSets(REAL_SESSION, [{ dsname, dsorg: "PS", vol: "*" }], {extensionMap: extMap}); + Imperative.console.info("Response: " + inspect(response)); + } catch (err) { + error = err; + Imperative.console.info("Error: " + inspect(error)); + } + expect(error).toBeFalsy(); + expect(response).toBeTruthy(); + expect(response.success).toBeTruthy(); + expect(response.commandResponse).toContain("1 data set(s) downloaded successfully"); + + file = dsname.toLowerCase() + ".jcl"; + // Compare the downloaded contents to those uploaded + const fileContents = stripNewLines(readFileSync(`${file}`).toString()); + expect(fileContents).toEqual(data); }); }); }); @@ -960,4 +1307,3 @@ describe("Download Data Set", () => { }); }); }); - diff --git 
a/packages/zosfiles/__tests__/__system__/methods/list/List.system.test.ts b/packages/zosfiles/__tests__/__system__/methods/list/List.system.test.ts index 4b0852a411..738e0c25f0 100644 --- a/packages/zosfiles/__tests__/__system__/methods/list/List.system.test.ts +++ b/packages/zosfiles/__tests__/__system__/methods/list/List.system.test.ts @@ -9,9 +9,9 @@ * */ -import { Create, CreateDataSetTypeEnum, Delete, IListOptions, IZosFilesResponse, List, Upload } from "../../../../src"; +import { Create, CreateDataSetTypeEnum, Delete, IListOptions, IZosFilesResponse, List, Upload, ZosFilesMessages } from "../../../../src"; import { Imperative, Session } from "@zowe/imperative"; -import { inspect } from "util"; +import { format, inspect } from "util"; import { ITestEnvironment } from "@zowe/cli-test-utils"; import { TestEnvironment } from "../../../../../../__tests__/__src__/environment/TestEnvironment"; import { ITestPropertiesSchema } from "../../../../../../__tests__/__src__/properties/ITestPropertiesSchema"; @@ -445,4 +445,75 @@ describe("List command group", () => { }); + describe("dataSetsMatchingPattern", () => { + beforeEach(async () => { + await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_SEQUENTIAL, dsname, + { volser: defaultSystem.datasets.vol }); + await delay(delayTime); + await Create.dataSet(REAL_SESSION, CreateDataSetTypeEnum.DATA_SET_SEQUENTIAL, dsname + ".LIKE", + { volser: defaultSystem.datasets.vol }); + await delay(delayTime); + }); + + afterEach(async () => { + await Delete.dataSet(REAL_SESSION, dsname); + await delay(delayTime); + await Delete.dataSet(REAL_SESSION, dsname + ".LIKE"); + await delay(delayTime); + }); + + it("should find data sets that match a pattern", async () => { + let response; + let caughtError; + + try { + response = await List.dataSetsMatchingPattern(REAL_SESSION, [dsname]); + } catch (error) { + caughtError = error; + } + + expect(caughtError).toBeUndefined(); + expect(response).toBeDefined(); + expect(response.success).toBe(true); + expect(response.commandResponse).toContain(format(ZosFilesMessages.dataSetsMatchedPattern.message, 2)); + expect(response.apiResponse.length).toBe(2); + expect(response.apiResponse[0].dsname).toBe(dsname); + expect(response.apiResponse[1].dsname).toBe(dsname + ".LIKE"); + }); + + it("should exclude data sets that do not match a pattern", async () => { + let response; + let caughtError; + + try { + response = await List.dataSetsMatchingPattern(REAL_SESSION, [dsname], + { excludePatterns: [dsname + ".LIKE"] }); + } catch (error) { + caughtError = error; + } + + expect(caughtError).toBeUndefined(); + expect(response).toBeDefined(); + expect(response.success).toBe(true); + expect(response.commandResponse).toContain(format(ZosFilesMessages.dataSetsMatchedPattern.message, 1)); + expect(response.apiResponse.length).toBe(1); + expect(response.apiResponse[0].dsname).toBe(dsname); + }); + + it("should fail when no data sets match", async () => { + let response; + let caughtError; + + try { + response = await List.dataSetsMatchingPattern(REAL_SESSION, [dsname + ".INVALID"]); + } catch (error) { + caughtError = error; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(dsname + ".INVALID"); + }); + }); + }); diff --git a/packages/zosfiles/__tests__/__unit__/methods/download/Download.unit.test.ts b/packages/zosfiles/__tests__/__unit__/methods/download/Download.unit.test.ts index a880c39455..33967c7851 100644 --- 
a/packages/zosfiles/__tests__/__unit__/methods/download/Download.unit.test.ts +++ b/packages/zosfiles/__tests__/__unit__/methods/download/Download.unit.test.ts @@ -9,7 +9,7 @@ * */ -import { IO, Session } from "@zowe/imperative"; +import { ImperativeError, IO, Session } from "@zowe/imperative"; import { Utilities, ZosFilesMessages } from "../../../../src"; import { ZosmfHeaders, ZosmfRestClient } from "@zowe/core-for-zowe-sdk"; import { Download } from "../../../../src/methods/download/Download"; @@ -18,6 +18,7 @@ import { ZosFilesConstants } from "../../../../src/constants/ZosFiles.constants" import * as util from "util"; import { List } from "../../../../src/methods/list"; import { CLIENT_PROPERTY } from "../../../../src/doc/types/ZosmfRestClientProperties"; +import { IDownloadDsmResult } from "../../../../src/methods/download/doc/IDownloadDsmResult"; describe("z/OS Files - Download", () => { const dsname = "USER.DATA.SET"; @@ -983,10 +984,11 @@ describe("z/OS Files - Download", () => { }); }); - describe("datasetMatchingPattern", () => { + describe("allDataSets", () => { const listDataSetSpy = jest.spyOn(List, "dataSet"); const downloadDatasetSpy = jest.spyOn(Download, "dataSet"); const downloadAllMembersSpy = jest.spyOn(Download, "allMembers"); + const createDirsSpy = jest.spyOn(IO, "createDirsSyncFromFilePath"); const dataSetPS = { dsname: "TEST.PS.DATA.SET", @@ -1000,15 +1002,640 @@ describe("z/OS Files - Download", () => { beforeEach(() => { downloadDatasetSpy.mockClear(); - downloadDatasetSpy.mockImplementation(() => null); + downloadDatasetSpy.mockResolvedValue(undefined); downloadAllMembersSpy.mockClear(); - downloadAllMembersSpy.mockImplementation(() => null); + downloadAllMembersSpy.mockResolvedValue(undefined); listDataSetSpy.mockClear(); - listDataSetSpy.mockImplementation(() => null); + listDataSetSpy.mockResolvedValue(undefined); }); + it("should handle an error from Download.dataSet", async () => { + let response; + let caughtError; + + const dummyError = new Error("test"); + downloadDatasetSpy.mockImplementation(async () => { + throw dummyError; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeInstanceOf(ImperativeError); + expect(caughtError.message).toBe("Failed to download TEST.PS.DATA.SET"); + expect(caughtError.causeErrors).toEqual(dummyError); + + expect(downloadDatasetSpy).toHaveBeenCalledTimes(1); + expect(downloadDatasetSpy).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, { + directory: undefined, + extension: undefined, + file: `${dataSetPS.dsname.toLocaleLowerCase()}.txt` + }); + }); + + it("should handle an error from Download.allMembers", async () => { + let response; + let caughtError; + + const dummyError = new Error("test"); + downloadAllMembersSpy.mockImplementation(async () => { + throw dummyError; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPO] as any); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeInstanceOf(ImperativeError); + expect(caughtError.message).toBe("Failed to download TEST.PO.DATA.SET"); + expect(caughtError.causeErrors).toEqual(dummyError); + + expect(downloadAllMembersSpy).toHaveBeenCalledTimes(1); + expect(downloadAllMembersSpy).toHaveBeenCalledWith(dummySession, dataSetPO.dsname, {directory: "test/po/data/set"}); + }); + + it("should download all datasets specifying the directory, extension and 
binary mode", async () => { + let response; + let caughtError; + + const directory = "my/test/path"; + const extension = "xyz"; + const binary = true; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {directory, extension, binary}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {directory}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {binary, file: "my/test/path/test.ps.data.set.xyz"}); + }); + + it("should download all datasets specifying preserveOriginalLetterCase", async () => { + let response; + let caughtError; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, { preserveOriginalLetterCase: true }); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, { + file: "TEST.PS.DATA.SET.txt", + preserveOriginalLetterCase: true + }); + }); + + it("should download all datasets specifying the extension", async () => { + let response; + let caughtError; + + const extension = "xyz"; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {extension}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {file: "test.ps.data.set.xyz"}); + }); + + it("should download all datasets with maxConcurrentRequests set to zero", async () => { + let response; + let caughtError; + + const maxConcurrentRequests = 0; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {maxConcurrentRequests}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ ...dataSetPS, status: "Data set 
downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {file: "test.ps.data.set.txt", maxConcurrentRequests: 0}); + }); + + it("should download all datasets while specifying an extension with a leading dot", async () => { + let response; + let caughtError; + + const directory = "my/test/path"; + const extension = ".xyz"; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {directory, extension}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {directory}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {file: "my/test/path/test.ps.data.set.xyz"}); + }); + + it("should download all datasets specifying the directory and extension map 1", async () => { + let response; + let caughtError; + + const directory = "my/test/path"; + const extensionMap = {set: "file"}; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {directory, extensionMap}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {directory}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {extensionMap, file: "my/test/path/test.ps.data.set.file"}); + }); + + it("should download all datasets specifying the directory and extension map 2", async () => { + let response; + let caughtError; + + const directory = "my/test/path"; + const extensionMap = {fake: "file"}; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any, {directory, extensionMap}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {directory}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + expect(Download.dataSet).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {extensionMap, file: "my/test/path/test.ps.data.set.txt"}); + }); + + it("should download all datasets without any options", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, [dataSetPS] as any); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + 
downloaded: ["TEST.PS.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ ...dataSetPS, status: "Data set downloaded" }] + }); + }); + + it("should not download datasets when pattern does not match any", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, []); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.missingDataSets.message); + }); + + it("should not download datasets when pattern matches only datasets which failed to list attributes", async () => { + let response; + let caughtError; + + Download.dataSet = jest.fn(); + + try { + response = await Download.allDataSets(dummySession, [{ + dsname: dataSetPO.dsname, + error: new Error("i haz bad data set") + }, dataSetPS] as any); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.failedToDownloadDataSets.message); + expect(Download.dataSet).toHaveBeenCalledTimes(0); + }); + + it("should not download datasets when pattern matches only archived datasets", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, [{ dsname: dataSetPS.dsname }] as any); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.failedToDownloadDataSets.message); + }); + + it("should not download datasets when pattern matches only unsupported datasets", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, [{ + dsname: "TEST.DATA.SET", + dsorg: "unknown" + }] as any); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.failedToDownloadDataSets.message); + }); + + it("should download datasets when pattern matches only datasets which failed to list attributes and failFast is false", async () => { + const fakeError = new Error("i haz bad data set"); + let response; + let caughtError; + + Download.dataSet = jest.fn(async () => { + return { + commandResponse: "Data set downloaded", + apiResponse: { + items: [dataSetPS] + }, + }; + }); + + try { + response = await Download.allDataSets(dummySession, [{ + dsname: dataSetPO.dsname, + error: fakeError + }, dataSetPS] as any, { failFast: false }); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.datasetDownloadFailed.message); + expect(caughtError.message).toContain(dataSetPO.dsname); + expect(Download.dataSet).toHaveBeenCalledTimes(1); + }); + + it("should download datasets when pattern matches only archived datasets and failFast is false", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, [{ + dsname: dataSetPS.dsname, + vol: "MIGRATC" + }] as any, { failFast: false }); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: false, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: [], + failedArchived: ["TEST.PS.DATA.SET"], + failedUnsupported: [], + 
failedWithErrors: {} + }, { failFast: false }), + apiResponse: [{ + dsname: "TEST.PS.DATA.SET", + vol: "MIGRATC", + status: "Skipped: Archived data set or alias - type MIGRATC." + }] + }); + }); + + it("should download datasets when pattern matches only unsupported datasets and failFast is false", async () => { + let response; + let caughtError; + + try { + response = await Download.allDataSets(dummySession, [{ + dsname: "TEST.DATA.SET", + dsorg: "unknown" + }] as any, { failFast: false }); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: false, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: [], + failedArchived: [], + failedUnsupported: ["TEST.DATA.SET"], + failedWithErrors: {} + }, { failFast: false }), + apiResponse: [{ + dsname: "TEST.DATA.SET", + dsorg: "unknown", + status: "Skipped: Unsupported data set - type unknown." + }] + }); + }); + + it("should download datasets when pattern matches a partitioned dataset", async () => { + let response; + let caughtError; + + downloadAllMembersSpy.mockImplementation(async () => { + return { + apiResponse: { + items: [ + { member: "TESTDS" } + ] + }, + commandResponse: util.format(ZosFilesMessages.datasetDownloadedSuccessfully.message, "./") + }; + }); + + List.allMembers = jest.fn(async () => { + return { + apiResponse: { + items: [ + { member: "TESTDS" } + ] + } + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPO] as any); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PO.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ + ...dataSetPO, + status: util.format(ZosFilesMessages.datasetDownloadedSuccessfully.message, "./") + "\nMembers: TESTDS;" + }] + }); + }); + + it("should download datasets when pattern matches a partitioned dataset with no members", async () => { + let response; + let caughtError; + + createDirsSpy.mockClear(); + downloadAllMembersSpy.mockImplementation(async () => { + return { + apiResponse: { + items: [] + }, + commandResponse: ZosFilesMessages.noMembersFound.message + }; + }); + + List.allMembers = jest.fn(async () => { + return { + apiResponse: { + items: [] + } + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPO] as any); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PO.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {}), + apiResponse: [{ + ...dataSetPO, + status: ZosFilesMessages.noMembersFound.message + }] + }); + expect(createDirsSpy).toHaveBeenCalledTimes(1); + expect(createDirsSpy).toHaveBeenCalledWith("test/po/data/set"); + }); + + it("should download datasets when pattern matches a partitioned dataset and directory is supplied", async () => { + let response; + let caughtError; + const directory = "my/test/path/"; + + downloadAllMembersSpy.mockImplementation(async () => { + return { + apiResponse: { + items: [ + { member: "TESTDS" } + ] + }, + commandResponse: util.format(ZosFilesMessages.datasetDownloadedSuccessfully.message, directory) + }; + }); + + List.allMembers = jest.fn(async () => { + return { + apiResponse: { + items: 
[ + { member: "TESTDS" } + ] + } + }; + }); + + try { + response = await Download.allDataSets(dummySession, [dataSetPO] as any, {directory}); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: (Download as any).buildDownloadDsmResponse({ + downloaded: ["TEST.PO.DATA.SET"], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }, {directory}), + apiResponse: [{ + ...dataSetPO, + status: util.format(ZosFilesMessages.datasetDownloadedSuccessfully.message, directory) + "\nMembers: TESTDS;" + }] + }); + }); }); describe("USS File", () => { @@ -1385,4 +2012,50 @@ describe("z/OS Files - Download", () => { expect(ioWriteStreamSpy).toHaveBeenCalledWith(destination); }); }); + + describe("buildDownloadDsmResponse", () => { + it("should build response with data sets that downloaded successfully", () => { + const result: IDownloadDsmResult = (Download as any).emptyDownloadDsmResult(); + result.downloaded = ["HLQ.DS.TEST"]; + const response: string = (Download as any).buildDownloadDsmResponse(result, {}); + expect(response).toContain("1 data set(s) downloaded successfully"); + expect(response).not.toContain("1 data set(s) failed to download"); + }); + + it("should build response with data sets skipped because they are archived", () => { + const result: IDownloadDsmResult = (Download as any).emptyDownloadDsmResult(); + result.failedArchived = ["HLQ.DS.SKIPPED"]; + const response: string = (Download as any).buildDownloadDsmResponse(result, {}); + expect(response).toContain("1 data set(s) failed to download"); + expect(response).toContain("1 failed because they are archived"); + }); + + it("should build response with data sets skipped because they are an unsupported type", () => { + const result: IDownloadDsmResult = (Download as any).emptyDownloadDsmResult(); + result.failedUnsupported = ["HLQ.DS.SKIPPED"]; + const response: string = (Download as any).buildDownloadDsmResponse(result, {}); + expect(response).toContain("1 data set(s) failed to download"); + expect(response).toContain("1 failed because they are an unsupported type"); + }); + + it("should build response with data sets that failed to download", () => { + const errorMsg = "i haz bad data set"; + const result: IDownloadDsmResult = (Download as any).emptyDownloadDsmResult(); + result.failedWithErrors = { "HLQ.DS.FAILED": new Error(errorMsg) }; + const response: string = (Download as any).buildDownloadDsmResponse(result, {}); + expect(response).toContain("1 data set(s) failed to download"); + expect(response).toContain(errorMsg); + expect(response).toContain("Some data sets may have been skipped because --fail-fast is true"); + }); + + it("should build response with data sets that failed to download when failFast is false", () => { + const errorMsg = "i haz bad data set"; + const result: IDownloadDsmResult = (Download as any).emptyDownloadDsmResult(); + result.failedWithErrors = { "HLQ.DS.FAILED": new Error(errorMsg) }; + const response: string = (Download as any).buildDownloadDsmResponse(result, { failFast: false }); + expect(response).toContain("1 data set(s) failed to download"); + expect(response).toContain(errorMsg); + expect(response).not.toContain("Some data sets may have been skipped because --fail-fast is true"); + }); + }); }); diff --git a/packages/zosfiles/__tests__/__unit__/methods/list/List.unit.test.ts b/packages/zosfiles/__tests__/__unit__/methods/list/List.unit.test.ts index 58ebf00bd2..c392ae6bed 100644 --- 
a/packages/zosfiles/__tests__/__unit__/methods/list/List.unit.test.ts +++ b/packages/zosfiles/__tests__/__unit__/methods/list/List.unit.test.ts @@ -16,6 +16,7 @@ import { ZosFilesMessages } from "../../../../src/constants/ZosFiles.messages"; import { posix } from "path"; import { ZosFilesConstants } from "../../../../src/constants/ZosFiles.constants"; import { IListOptions } from "../../../../src"; +import * as util from "util"; describe("z/OS Files - List", () => { const expectJsonSpy = jest.spyOn(ZosmfRestClient, "getExpectJSON"); @@ -881,4 +882,174 @@ describe("z/OS Files - List", () => { }); }); + + describe("dataSetsMatchingPattern", () => { + const listDataSetSpy = jest.spyOn(List, "dataSet"); + + const dataSetPS = { + dsname: "TEST.PS.DATA.SET", + dsorg: "PS" + }; + + const dataSetPO = { + dsname: "TEST.PO.DATA.SET", + dsorg: "PO" + }; + + beforeEach(() => { + listDataSetSpy.mockClear(); + listDataSetSpy.mockResolvedValue(undefined); + }); + + it("should successfully list PS and PO data sets using the List.dataSet API", async () => { + const pattern = "TEST.**.DATA.SET"; + let response; + let caughtError; + + listDataSetSpy.mockImplementation(async () => { + return { + apiResponse: { + items: [dataSetPS, dataSetPO] + } + }; + }); + + try { + response = await List.dataSetsMatchingPattern(dummySession, [pattern]); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: util.format(ZosFilesMessages.dataSetsMatchedPattern.message, 2), + apiResponse: [dataSetPS, dataSetPO] + }); + + expect(listDataSetSpy).toHaveBeenCalledTimes(1); + expect(listDataSetSpy).toHaveBeenCalledWith(dummySession, pattern, {attributes: true}); + }); + + it("should throw an error if the data set name is not specified", async () => { + let response; + let caughtError; + + // Test for NULL + try { + response = await List.dataSetsMatchingPattern(dummySession, null); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.missingPatterns.message); + + caughtError = undefined; + // Test for UNDEFINED + try { + response = await List.dataSetsMatchingPattern(dummySession, undefined); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.missingPatterns.message); + + caughtError = undefined; + // Test for EMPTY + try { + response = await List.dataSetsMatchingPattern(dummySession, ["", undefined, null]); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toBeDefined(); + expect(caughtError.message).toContain(ZosFilesMessages.missingPatterns.message); + }); + + it("should handle an error from the List.dataSet API", async () => { + const dummyError = new Error("test2"); + let response; + let caughtError; + + listDataSetSpy.mockImplementation(async () => { + throw dummyError; + }); + + try { + response = await List.dataSetsMatchingPattern(dummySession, [dataSetPS.dsname]); + } catch (e) { + caughtError = e; + } + + expect(response).toBeUndefined(); + expect(caughtError).toEqual(dummyError); + + expect(listDataSetSpy).toHaveBeenCalledTimes(2); + expect(listDataSetSpy).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {attributes: true}); + }); + + it("should handle an error from the List.dataSet API and fall back to fetching attributes sequentially", async () => 
{ + let response; + let caughtError; + + listDataSetSpy.mockImplementationOnce(async () => { + throw new Error("test2"); + }).mockImplementation(async () => { + return { + apiResponse: { + items: [dataSetPS] + } + }; + }); + + try { + response = await List.dataSetsMatchingPattern(dummySession, [dataSetPS.dsname]); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: true, + commandResponse: util.format(ZosFilesMessages.dataSetsMatchedPattern.message, 1), + apiResponse: [dataSetPS] + }); + + expect(listDataSetSpy).toHaveBeenCalledTimes(3); + expect(listDataSetSpy).toHaveBeenCalledWith(dummySession, dataSetPS.dsname, {attributes: true}); + }); + + it("should handle an error when the exclude pattern is specified", async () => { + const excludePatterns = ["TEST.PS.DATA.SET"]; + let response; + let caughtError; + + List.dataSet = jest.fn(async () => { + return { + apiResponse: { + items: [dataSetPS] + } + }; + }); + + try { + response = await List.dataSetsMatchingPattern( + dummySession, [dataSetPS.dsname], { excludePatterns }); + } catch (e) { + caughtError = e; + } + + expect(caughtError).toBeUndefined(); + expect(response).toEqual({ + success: false, + commandResponse: util.format(ZosFilesMessages.noDataSetsInList.message), + apiResponse: [] + }); + }); + }); }); diff --git a/packages/zosfiles/package.json b/packages/zosfiles/package.json index cf9945a33e..c282592e09 100644 --- a/packages/zosfiles/package.json +++ b/packages/zosfiles/package.json @@ -52,7 +52,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "@zowe/zos-uss-for-zowe-sdk": "7.2.5", "eslint": "^7.32.0", "madge": "^4.0.1", diff --git a/packages/zosfiles/src/constants/ZosFiles.messages.ts b/packages/zosfiles/src/constants/ZosFiles.messages.ts index d1c5f1aba0..bcf14edc01 100644 --- a/packages/zosfiles/src/constants/ZosFiles.messages.ts +++ b/packages/zosfiles/src/constants/ZosFiles.messages.ts @@ -177,11 +177,20 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { /** * Message indicating that the data sets matching pattern was downloaded successfully * @type {IMessageDefinition} + * @deprecated */ datasetsDownloadedSuccessfully: { message: "Data sets matching pattern downloaded successfully.\nDestination: %s" }, + /** + * Message indicating that the data sets matching pattern were listed successfully + * @type {IMessageDefinition} + */ + dataSetsMatchedPattern: { + message: "%d data set(s) were found matching pattern." + }, + /** * Message indicating that file is uploaded to data set successfully * @type {IMessageDefinition} @@ -358,9 +367,18 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { message: "No pattern to match data sets passed." }, + /** + * Message indicating that data set objects were not passed. + * @type {IMessageDefinition} + */ + missingDataSets: { + message: "No list of data sets to download was passed." + }, + /** * Message indicating that all data sets matching the provided patterns are archived. * @type {IMessageDefinition} + * @deprecated */ allDataSetsArchived: { message: "All data sets matching the selected pattern(s) were archived." @@ -369,6 +387,7 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { /** * Message indicating that no data sets remain to be downloaded after the excluded ones were filtered out. 
* @type {IMessageDefinition} + * @deprecated */ noDataSetsInList: { message: "No data sets left after excluded pattern(s) were filtered out." @@ -385,6 +404,7 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { /** * Message indicating that no data sets remain to be downloaded after the excluded ones were filtered out. * @type {IMessageDefinition} + * @deprecated */ noDataSetsMatchingPatternRemain: { message: "After filtering out the archived files and files that match the exclusion-parameters, no data sets matching" + @@ -394,11 +414,20 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { /** * Message indicating that only empty partitioned data sets match the provided patterns * @type {IMessageDefinition} + * @deprecated */ onlyEmptyPartitionedDataSets: { message: "Only empty partitioned data sets match the provided patterns." }, + /** + * Message indicating that some or all data sets failed to download + * @type {IMessageDefinition} + */ + failedToDownloadDataSets: { + message: "Failed to download data sets" + }, + /** * Message indicating that a failure has happened in the NodeJS File System API */ @@ -594,6 +623,14 @@ export const ZosFilesMessages: { [key: string]: IMessageDefinition } = { message: "Failed to download the following members: \n" }, + /** + * Message indicating that the following data sets failed to properly download + * @type {IMessageDefinition} + */ + datasetDownloadFailed: { + message: "Failed to download the following data sets: \n" + }, + /** * Message indicating the attributes are used during dataset creation * @type {IMessageDefinition} diff --git a/packages/zosfiles/src/methods/download/Download.ts b/packages/zosfiles/src/methods/download/Download.ts index 93fe4d535a..488bb4af38 100644 --- a/packages/zosfiles/src/methods/download/Download.ts +++ b/packages/zosfiles/src/methods/download/Download.ts @@ -9,7 +9,7 @@ * */ -import { AbstractSession, ImperativeExpect, IO, Logger, TaskProgress, ImperativeError } from "@zowe/imperative"; +import { AbstractSession, ImperativeExpect, IO, Logger, TaskProgress, ImperativeError, TextUtils } from "@zowe/imperative"; import { posix } from "path"; import * as util from "util"; @@ -25,6 +25,17 @@ import { IRestClientResponse } from "../../doc/IRestClientResponse"; import { CLIENT_PROPERTY } from "../../doc/types/ZosmfRestClientProperties"; import { IOptionsFullResponse } from "../../doc/IOptionsFullResponse"; import { Utilities } from "../utilities"; +import { IZosmfListResponse } from "../list/doc/IZosmfListResponse"; +import { IDownloadDsmResult } from "./doc/IDownloadDsmResult"; + +type IZosmfListResponseWithStatus = IZosmfListResponse & { error?: Error; status?: string }; + +interface IDownloadDsmTask { + handler: (session: AbstractSession, dsname: string, options: IDownloadOptions) => Promise; + dsname: string; + options: IDownloadOptions; + onSuccess: (response: IZosFilesResponse, options: IDownloadOptions) => void; +} /** * This class holds helper functions that are used to download data sets, members and more through the z/OS MF APIs @@ -61,6 +72,7 @@ export class Download { // required ImperativeExpect.toNotBeNullOrUndefined(dataSetName, ZosFilesMessages.missingDatasetName.message); ImperativeExpect.toNotBeEqual(dataSetName, "", ZosFilesMessages.missingDatasetName.message); + let destination: string; try { // Format the endpoint to send the request to @@ -85,7 +97,7 @@ export class Download { // Get a proper destination for the file to be downloaded // If the "file" is not 
provided, we create a folder structure similar to the data set name // Note that the "extension" options do not affect the destination if the "file" options were provided - const destination = (() => { + destination = (() => { if (options.file) { return options.file; } @@ -137,6 +149,10 @@ export class Download { } catch (error) { Logger.getAppLogger().error(error); + if (destination != null) { + IO.deleteFile(destination); + } + throw error; } } @@ -233,19 +249,18 @@ export class Download { encoding: options.encoding, responseTimeout: options.responseTimeout }).catch((err) => { - // If we should fail fast, rethrow error - if (options.failFast || options.failFast === undefined) { - throw err; - } downloadErrors.push(err); failedMembers.push(fileName); // Delete the file that could not be downloaded IO.deleteFile(baseDir + IO.FILE_DELIM + fileName + IO.normalizeExtension(extension)); + // If we should fail fast, rethrow error + if (options.failFast || options.failFast === undefined) { + throw err; + } }); }; const maxConcurrentRequests = options.maxConcurrentRequests == null ? 1 : options.maxConcurrentRequests; - if (maxConcurrentRequests === 0) { await Promise.all(memberList.map(createDownloadPromise)); } else { @@ -275,6 +290,190 @@ export class Download { } } + /** + * Download a list of data sets to local files + * + * @param {AbstractSession} session - z/OS MF connection info + * @param {IZosmfListResponse[]} dataSetObjs - contains data set objects returned by z/OSMF List API + * @param {IDownloadOptions} [options={}] - contains the options to be sent + * + * @returns {Promise} A response indicating the outcome of the API + * + * @throws {ImperativeError} data set name must be set + * @throws {Error} When the {@link ZosmfRestClient} throws an error + * + * @example + * ```typescript + * + * // Download a list of "PS" and "PO" datasets to the directory "./path/to/dir/" + * await Download.allDataSets(session, [ + * { dsname: "USER.DATA.SET.PS", dsorg: "PS" }, + * { dsname: "USER.DATA.SET.PDS", dsorg: "PO" } + * ], {directory: "./path/to/dir/"}); + * ``` + * + * @see https://www.ibm.com/support/knowledgecenter/SSLTBW_2.2.0/com.ibm.zos.v2r2.izua700/IZUHPINFO_API_GetReadDataSet.htm + */ + public static async allDataSets(session: AbstractSession, dataSetObjs: IZosmfListResponse[], + options: IDownloadOptions = {}): Promise { + ImperativeExpect.toNotBeEqual(dataSetObjs.length, 0, ZosFilesMessages.missingDataSets.message); + const result = this.emptyDownloadDsmResult(); + const zosmfResponses: IZosmfListResponseWithStatus[] = [...dataSetObjs]; + + try { + // Download data sets + const poDownloadTasks: IDownloadDsmTask[] = []; + const psDownloadTasks: IDownloadDsmTask[] = []; + const mutableOptions: IDownloadOptions = { ...options, task: undefined }; + + for (const dataSetObj of zosmfResponses) { + let llq = dataSetObj.dsname.substring(dataSetObj.dsname.lastIndexOf(".") + 1, dataSetObj.dsname.length); + if (!options.preserveOriginalLetterCase) { + llq = llq.toLowerCase(); + } + if (options.extensionMap != null) { + mutableOptions.extension = options.extensionMap[llq] ?? 
options.extension; + } + + // Normalize the extension, remove leading periods + if (mutableOptions.extension && mutableOptions.extension.startsWith(".")) { + mutableOptions.extension = mutableOptions.extension.replace(/^\.+/g, ""); + } + + if (options.directory == null) { + if (dataSetObj.dsorg === "PO" || dataSetObj.dsorg === "PO-E") { + mutableOptions.directory = ZosFilesUtils.getDirsFromDataSet(dataSetObj.dsname); + } else { + mutableOptions.file = `${dataSetObj.dsname}.` + + `${mutableOptions.extension ?? ZosFilesUtils.DEFAULT_FILE_EXTENSION}`; + if (!options.preserveOriginalLetterCase) { + mutableOptions.file = mutableOptions.file.toLowerCase(); + } + mutableOptions.directory = undefined; + mutableOptions.extension = undefined; + } + } else if (dataSetObj.dsorg === "PO" || dataSetObj.dsorg === "PO-E") { + mutableOptions.directory = `${mutableOptions.directory}/${ZosFilesUtils.getDirsFromDataSet(dataSetObj.dsname)}`; + } else { + mutableOptions.file = `${mutableOptions.directory}/${dataSetObj.dsname}.` + + `${mutableOptions.extension ?? ZosFilesUtils.DEFAULT_FILE_EXTENSION}`; + if (!options.preserveOriginalLetterCase) { + mutableOptions.file = mutableOptions.file.toLowerCase(); + } + mutableOptions.directory = undefined; + mutableOptions.extension = undefined; + } + + if (dataSetObj.error != null) { + result.failedWithErrors[dataSetObj.dsname] = dataSetObj.error; + } else if (dataSetObj.dsorg == null) { + dataSetObj.status = `Skipped: Archived data set or alias - type ${dataSetObj.vol}.`; + result.failedArchived.push(dataSetObj.dsname); + } else if (dataSetObj.dsorg === "PS") { + psDownloadTasks.push({ + handler: Download.dataSet.bind(this), + dsname: dataSetObj.dsname, + options: { ...mutableOptions }, + onSuccess: (downloadResponse) => { + dataSetObj.status = downloadResponse.commandResponse; + } + }); + } else if (dataSetObj.dsorg === "PO" || dataSetObj.dsorg === "PO-E") { + poDownloadTasks.push({ + handler: Download.allMembers.bind(this), + dsname: dataSetObj.dsname, + options: { ...mutableOptions }, + onSuccess: (downloadResponse, options) => { + dataSetObj.status = downloadResponse.commandResponse; + const listMembers: string[] = downloadResponse.apiResponse.items.map((item: any) => ` ${item.member}`); + if (listMembers.length === 0) { // Create directory for empty PO data set + IO.createDirsSyncFromFilePath(options.directory); + } else { + dataSetObj.status += `\nMembers: ${listMembers};`; + } + } + }); + } else { + dataSetObj.status = `Skipped: Unsupported data set - type ${dataSetObj.dsorg}.`; + result.failedUnsupported.push(dataSetObj.dsname); + } + mutableOptions.directory = options.directory; + } + + // If we should fail fast, throw error + if ((result.failedArchived.length > 0 || result.failedUnsupported.length > 0 || + Object.keys(result.failedWithErrors).length > 0) && options.failFast !== false) { + throw new ImperativeError({ + msg: ZosFilesMessages.failedToDownloadDataSets.message, + additionalDetails: this.buildDownloadDsmResponse(result, options) + }); + } + + let downloadsInitiated = 0; + const createDownloadPromise = (task: IDownloadDsmTask) => { + if (options.task != null) { + options.task.statusMessage = "Downloading data set " + task.dsname; + options.task.percentComplete = Math.floor(TaskProgress.ONE_HUNDRED_PERCENT * + (downloadsInitiated / (poDownloadTasks.length + psDownloadTasks.length))); + downloadsInitiated++; + } + + return task.handler(session, task.dsname, task.options).then( + (downloadResponse) => { + result.downloaded.push(task.dsname); + 
task.onSuccess(downloadResponse, task.options); + }, + (err) => { + result.failedWithErrors[task.dsname] = err; + // If we should fail fast, rethrow error + if (options.failFast || options.failFast === undefined) { + throw new ImperativeError({ + msg: `Failed to download ${task.dsname}`, + causeErrors: err, + additionalDetails: this.buildDownloadDsmResponse(result, options) + }); + } + } + ); + }; + + // First download the partitioned data sets + // We execute the promises sequentially to make sure that + // we do not exceed `--mcr` when downloading multiple members + for (const task of poDownloadTasks) { + await createDownloadPromise(task); + } + + // Next download the sequential data sets in a pool + const maxConcurrentRequests = options.maxConcurrentRequests == null ? 1 : options.maxConcurrentRequests; + if (maxConcurrentRequests === 0) { + await Promise.all(psDownloadTasks.map(createDownloadPromise)); + } else { + await asyncPool(maxConcurrentRequests, psDownloadTasks, createDownloadPromise); + } + } catch (error) { + Logger.getAppLogger().error(error); + + throw error; + } + + // Handle failed downloads if no errors were thrown yet + if (Object.keys(result.failedWithErrors).length > 0) { + throw new ImperativeError({ + msg: ZosFilesMessages.datasetDownloadFailed.message + Object.keys(result.failedWithErrors).join("\n"), + causeErrors: Object.values(result.failedWithErrors), + additionalDetails: this.buildDownloadDsmResponse(result, options) + }); + } + + const numFailed = result.failedArchived.length + result.failedUnsupported.length + Object.keys(result.failedWithErrors).length; + return { + success: numFailed === 0, + commandResponse: this.buildDownloadDsmResponse(result, options), + apiResponse: zosmfResponses + }; + } + /** * Retrieve USS file content and save it in your local workspace. * @@ -351,5 +550,65 @@ export class Download { } } -} + /** + * Create an empty download data sets matching result. + * @returns Results object with all lists initialized as empty + */ + private static emptyDownloadDsmResult(): IDownloadDsmResult { + return { + downloaded: [], + failedArchived: [], + failedUnsupported: [], + failedWithErrors: {} + }; + } + + /** + * Build a response string from a download data sets matching result. + * @param result Result object from the download API + * @param options Options passed to the download API + * @returns Response string to print to console + */ + private static buildDownloadDsmResponse(result: IDownloadDsmResult, options: IDownloadOptions = {}): string { + const failedDsnames = Object.keys(result.failedWithErrors); + const numFailed = result.failedArchived.length + result.failedUnsupported.length + failedDsnames.length; + const responseLines = []; + + if (result.downloaded.length > 0) { + responseLines.push(TextUtils.chalk.green(`${result.downloaded.length} data set(s) downloaded successfully to `) + + (options.directory ?? 
"./")); + } + if (numFailed > 0) { + responseLines.push(TextUtils.chalk.red(`${numFailed} data set(s) failed to download:`)); + if (result.failedArchived.length > 0) { + responseLines.push( + TextUtils.chalk.yellow(`${result.failedArchived.length} failed because they are archived`), + ...result.failedArchived.map(dsname => ` ${dsname}`) + ); + } + if (result.failedUnsupported.length > 0) { + responseLines.push( + TextUtils.chalk.yellow(`${result.failedUnsupported.length} failed because they are an unsupported type`), + ...result.failedUnsupported.map(dsname => ` ${dsname}`) + ); + } + if (failedDsnames.length > 0) { + responseLines.push( + TextUtils.chalk.yellow(`${failedDsnames.length} failed because of an uncaught error`), + ...failedDsnames.map(dsname => ` ${dsname}`), + "", + ...Object.values(result.failedWithErrors).map((err: Error) => err.message) + ); + } + if (options.failFast !== false) { + responseLines.push( + "\nSome data sets may have been skipped because --fail-fast is true.", + "To ignore errors and continue downloading, rerun the command with --fail-fast set to false." + ); + } + } + + return responseLines.join("\n") + "\n"; + } +} diff --git a/packages/zosfiles/src/methods/download/doc/IDownloadDsmResult.ts b/packages/zosfiles/src/methods/download/doc/IDownloadDsmResult.ts new file mode 100644 index 0000000000..342bbce881 --- /dev/null +++ b/packages/zosfiles/src/methods/download/doc/IDownloadDsmResult.ts @@ -0,0 +1,39 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +/** + * This interface defines results of the download data sets matching command. + * @export + * @interface IDownloadDsmResult + */ +export interface IDownloadDsmResult { + /** + * List of data set names that have downloaded successfully. + */ + downloaded: string[]; + + /** + * List of data set names that failed to download because they are archived. + */ + failedArchived: string[]; + + /** + * List of data set names that failed to download because they are an + * unsupported type. + */ + failedUnsupported: string[]; + + /** + * Object containing key-value pairs of data set names and errors for data + * sets that failed to download. + */ + failedWithErrors: { [key: string]: Error }; +} diff --git a/packages/zosfiles/src/methods/download/doc/IDownloadOptions.ts b/packages/zosfiles/src/methods/download/doc/IDownloadOptions.ts index 9f2b7a7df8..c42c08a478 100644 --- a/packages/zosfiles/src/methods/download/doc/IDownloadOptions.ts +++ b/packages/zosfiles/src/methods/download/doc/IDownloadOptions.ts @@ -39,6 +39,7 @@ export interface IDownloadOptions extends IOptions { * Exclude data sets that match these DSLEVEL patterns. 
Any data sets that match * this pattern will not be downloaded * @example "ibmuser.**.jcl, ibmuser.rexa.*" + * @deprecated Use the `List.dataSetsMatchingPattern` API to match data sets and exclude patterns */ excludePatterns?: string[]; diff --git a/packages/zosfiles/src/methods/list/List.ts b/packages/zosfiles/src/methods/list/List.ts index 940e2bc2e3..127cf2ce4f 100644 --- a/packages/zosfiles/src/methods/list/List.ts +++ b/packages/zosfiles/src/methods/list/List.ts @@ -9,17 +9,20 @@ * */ -import { AbstractSession, IHeaderContent, ImperativeExpect, Logger } from "@zowe/imperative"; +import { AbstractSession, IHeaderContent, ImperativeExpect, Logger, TaskProgress } from "@zowe/imperative"; import { posix } from "path"; +import * as util from "util"; -import { ZosmfRestClient, ZosmfHeaders } from "@zowe/core-for-zowe-sdk"; +import { ZosmfRestClient, ZosmfHeaders, asyncPool } from "@zowe/core-for-zowe-sdk"; import { ZosFilesConstants } from "../../constants/ZosFiles.constants"; import { ZosFilesMessages } from "../../constants/ZosFiles.messages"; import { IZosFilesResponse } from "../../doc/IZosFilesResponse"; import { IListOptions } from "./doc/IListOptions"; import { IUSSListOptions } from "./doc/IUSSListOptions"; import { IFsOptions } from "./doc/IFsOptions"; +import { IZosmfListResponse } from "./doc/IZosmfListResponse"; +import { IDsmListOptions } from "./doc/IDsmListOptions"; /** * This class holds helper functions that are used to list data sets and its members through the z/OS MF APIs @@ -287,6 +290,107 @@ export class List { } } + /** + * List data sets that match a DSLEVEL pattern + * @param {AbstractSession} session z/OSMF connection info + * @param {string[]} patterns Data set patterns to include + * @param {IDsmListOptions} options Contains options for the z/OSMF request + * @returns {Promise} List of z/OSMF list responses for each data set + * + * @example + * ```typescript + * + * // List all "PS" and "PO" datasets that match the pattern "USER.**.DATASET" + * await List.dataSetsMatchingPattern(session, "USER.**.DATASET"); + * ``` + */ + public static async dataSetsMatchingPattern(session: AbstractSession, patterns: string[], + options: IDsmListOptions = {}): Promise { + + // Pattern is required to be non-empty + ImperativeExpect.toNotBeNullOrUndefined(patterns, ZosFilesMessages.missingPatterns.message); + patterns = patterns.filter(Boolean); + ImperativeExpect.toNotBeEqual(patterns.length, 0, ZosFilesMessages.missingPatterns.message); + const zosmfResponses: IZosmfListResponse[] = []; + + // Get names of all data sets + for (const pattern of patterns) { + let response: any; + try { + response = await List.dataSet(session, pattern, { attributes: true }); + } catch (err) { + // Listing data sets with attributes may fail sometimes, for + // example if a TSO prompt is triggered. If that happens, we + // try first to list them all without attributes, and then fetch + // the attributes for each data set one by one. When an error + // is thrown we record it on the response object. This is a slow + // process but better than throwing an error. 
+ response = await List.dataSet(session, pattern); + + let listsInitiated = 0; + const createListPromise = (dataSetObj: any) => { + if (options.task != null) { + options.task.percentComplete = Math.floor(TaskProgress.ONE_HUNDRED_PERCENT * + (listsInitiated / response.apiResponse.items.length)); + listsInitiated++; + } + + return List.dataSet(session, dataSetObj.dsname, { attributes: true }).then( + (tempResponse) => { + Object.assign(dataSetObj, tempResponse.apiResponse.items[0]); + }, + (tempErr) => { + Object.assign(dataSetObj, { error: tempErr }); + } + ); + }; + + const maxConcurrentRequests = options.maxConcurrentRequests == null ? 1 : options.maxConcurrentRequests; + if (maxConcurrentRequests === 0) { + await Promise.all(response.apiResponse.items.map(createListPromise)); + } else { + await asyncPool(maxConcurrentRequests, response.apiResponse.items, createListPromise); + } + } + zosmfResponses.push(...response.apiResponse.items); + } + + // Check if data sets matching pattern found + if (zosmfResponses.length === 0) { + return { + success: false, + commandResponse: ZosFilesMessages.noDataSetsMatchingPattern.message, + apiResponse: [] + }; + } + + // Exclude names of data sets + for (const pattern of (options.excludePatterns || [])) { + const response = await List.dataSet(session, pattern); + response.apiResponse.items.forEach((dataSetObj: IZosmfListResponse) => { + const responseIndex = zosmfResponses.findIndex(response => response.dsname === dataSetObj.dsname); + if (responseIndex !== -1) { + zosmfResponses.splice(responseIndex, 1); + } + }); + } + + // Check if exclude pattern has left any data sets in the list + if (zosmfResponses.length === 0) { + return { + success: false, + commandResponse: ZosFilesMessages.noDataSetsInList.message, + apiResponse: [] + }; + } + + return { + success: true, + commandResponse: util.format(ZosFilesMessages.dataSetsMatchedPattern.message, zosmfResponses.length), + apiResponse: zosmfResponses + }; + } + private static get log() { return Logger.getAppLogger(); } diff --git a/packages/zosfiles/src/methods/list/doc/IDsmListOptions.ts b/packages/zosfiles/src/methods/list/doc/IDsmListOptions.ts new file mode 100644 index 0000000000..cc7bfc3ccf --- /dev/null +++ b/packages/zosfiles/src/methods/list/doc/IDsmListOptions.ts @@ -0,0 +1,41 @@ +/* +* This program and the accompanying materials are made available under the terms of the +* Eclipse Public License v2.0 which accompanies this distribution, and is available at +* https://www.eclipse.org/legal/epl-v20.html +* +* SPDX-License-Identifier: EPL-2.0 +* +* Copyright Contributors to the Zowe Project. +* +*/ + +import { ITaskWithStatus } from "@zowe/imperative"; +import { IZosFilesOptions } from "../../../doc/IZosFilesOptions"; + +/** + * This interface defines the options that can be sent into the list data sets matching function + */ +export interface IDsmListOptions extends IZosFilesOptions { + /** + * Exclude data sets that match these DSLEVEL patterns. Any data sets that match + * this pattern will not be listed + * @example "ibmuser.**.jcl, ibmuser.rexa.*" + */ + excludePatterns?: string[]; + + /** + * The maximum REST requests to perform at once + * Increasing this value results in faster requests but increases resource consumption + * on z/OS and risks encountering an error caused + * by making too many requests at once. 
+ * Default: 1 + */ + maxConcurrentRequests?: number; + + /** + * Task status object used by CLI handlers to create progress bars + * Optional + * @type {ITaskWithStatus} + */ + task?: ITaskWithStatus; +} diff --git a/packages/zosfiles/src/methods/list/doc/IUSSListOptions.ts b/packages/zosfiles/src/methods/list/doc/IUSSListOptions.ts index 3b210d5aa7..b1ed0892f3 100644 --- a/packages/zosfiles/src/methods/list/doc/IUSSListOptions.ts +++ b/packages/zosfiles/src/methods/list/doc/IUSSListOptions.ts @@ -12,7 +12,7 @@ import { IZosFilesOptions } from "../../../doc/IZosFilesOptions"; /** - * This interface defines the options that can be sent into the USS list files qfunction + * This interface defines the options that can be sent into the USS list files function */ export interface IUSSListOptions extends IZosFilesOptions { diff --git a/packages/zosfiles/src/methods/list/index.ts b/packages/zosfiles/src/methods/list/index.ts index 7f05ea444a..820082f5d8 100644 --- a/packages/zosfiles/src/methods/list/index.ts +++ b/packages/zosfiles/src/methods/list/index.ts @@ -9,6 +9,7 @@ * */ +export * from "./doc/IDsmListOptions"; export * from "./doc/IFsOptions"; export * from "./doc/IListOptions"; export * from "./doc/IUSSListOptions"; diff --git a/packages/zosjobs/package.json b/packages/zosjobs/package.json index 8b10f83488..6164e724a4 100644 --- a/packages/zosjobs/package.json +++ b/packages/zosjobs/package.json @@ -52,7 +52,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zoslogs/package.json b/packages/zoslogs/package.json index 5de8e3a5a1..362f7b1099 100644 --- a/packages/zoslogs/package.json +++ b/packages/zoslogs/package.json @@ -48,7 +48,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zosmf/package.json b/packages/zosmf/package.json index 7fed801b8e..becc34aa95 100644 --- a/packages/zosmf/package.json +++ b/packages/zosmf/package.json @@ -47,7 +47,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zostso/package.json b/packages/zostso/package.json index eea44575fc..923ed94862 100644 --- a/packages/zostso/package.json +++ b/packages/zostso/package.json @@ -51,7 +51,7 @@ "@types/node": "^12.12.24", "@zowe/cli-test-utils": "7.2.5", "@zowe/core-for-zowe-sdk": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3", diff --git a/packages/zosuss/package.json b/packages/zosuss/package.json index 7a6ca983fd..d5818e1cc7 100644 --- a/packages/zosuss/package.json +++ b/packages/zosuss/package.json @@ -51,7 +51,7 @@ "@types/node": "^12.12.24", "@types/ssh2": "^0.5.44", "@zowe/cli-test-utils": "7.2.5", - "@zowe/imperative": "5.3.3", + "@zowe/imperative": "5.3.4", "eslint": "^7.32.0", "madge": "^4.0.1", "rimraf": "^2.6.3",
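As a usage sketch for the new APIs in this changeset: `List.dataSetsMatchingPattern` resolves DSLEVEL patterns into data set objects, and `Download.allDataSets` consumes that list. The snippet below is illustrative only and not part of the diff; the host, credentials, data set names, output directory, and the `downloadMatchingDataSets` wrapper are placeholders, while the option names come from `IDsmListOptions` and `IDownloadOptions` as changed above.

```typescript
import { Session } from "@zowe/imperative";
import { Download, IZosFilesResponse, List } from "@zowe/zos-files-for-zowe-sdk";

// Placeholder connection details -- a real caller would build the session from a z/OSMF profile.
const session = new Session({
    hostname: "zosmf.example.com",
    port: 443,
    user: "ibmuser",
    password: "secret",
    type: "basic",
    rejectUnauthorized: false
});

async function downloadMatchingDataSets(): Promise<void> {
    // 1. Resolve the DSLEVEL patterns into concrete data set objects (with attributes).
    const listResponse: IZosFilesResponse = await List.dataSetsMatchingPattern(
        session, ["IBMUSER.**.JCL"], { excludePatterns: ["IBMUSER.OLD.JCL"] });
    if (!listResponse.success) {
        // e.g. no data sets matched, or the exclude patterns filtered them all out
        console.log(listResponse.commandResponse);
        return;
    }

    // 2. Hand the matched objects to the new bulk download API.
    const downloadResponse: IZosFilesResponse = await Download.allDataSets(
        session, listResponse.apiResponse, {
            directory: "./output",
            extensionMap: { jcl: "jcl" },   // low-level qualifier -> file extension
            failFast: false,                // keep going and report per-data-set failures
            maxConcurrentRequests: 1
        });

    // The summary string is produced by the new buildDownloadDsmResponse helper.
    console.log(downloadResponse.commandResponse);
}
```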
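The concurrency handling in `Download.allDataSets` reuses the pooling pattern already used for member downloads: partitioned data sets are processed one at a time, while sequential data sets go through `asyncPool` unless `maxConcurrentRequests` is 0. A stripped-down sketch of that dispatch, with a hypothetical `downloadOne` worker standing in for the bound `Download.dataSet`/`Download.allMembers` handlers:

```typescript
import { asyncPool } from "@zowe/core-for-zowe-sdk";

// Hypothetical worker; in the diff this role is played by the handlers queued
// in psDownloadTasks and poDownloadTasks.
async function downloadOne(dsname: string): Promise<void> {
    console.log(`downloading ${dsname} ...`);
}

// Sequential (PS) data sets: 0 means "no limit", otherwise cap the requests in flight.
async function downloadSequentialDataSets(dsnames: string[], maxConcurrentRequests = 1): Promise<void> {
    if (maxConcurrentRequests === 0) {
        await Promise.all(dsnames.map(downloadOne));
    } else {
        await asyncPool(maxConcurrentRequests, dsnames, downloadOne);
    }
}

// Partitioned (PO) data sets are downloaded sequentially so that the member
// downloads inside each one do not multiply past `--mcr`.
```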
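For tracing the failure categories, here is a hypothetical object in the shape of the new `IDownloadDsmResult` interface and how its counts drive the summary wording asserted by the unit tests above; the data set names and error text are made up.

```typescript
// Shape defined by the new IDownloadDsmResult interface; the values are illustrative only.
const result = {
    downloaded: ["HLQ.DS.TEST"],                        // downloaded successfully
    failedArchived: ["HLQ.DS.MIGRAT"],                  // skipped: archived data set or alias
    failedUnsupported: ["HLQ.DS.VSAM"],                 // skipped: unsupported dsorg
    failedWithErrors: { "HLQ.DS.BROKEN": new Error("i haz bad data set") }
};

// Download.allDataSets turns this bookkeeping into commandResponse via
// buildDownloadDsmResponse: "1 data set(s) downloaded successfully to ./",
// then "3 data set(s) failed to download" broken down by category, plus the
// --fail-fast hint unless failFast was explicitly false.
const numFailed = result.failedArchived.length + result.failedUnsupported.length
    + Object.keys(result.failedWithErrors).length;
console.log(`${result.downloaded.length} downloaded, ${numFailed} failed`);
```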