diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot b/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot index 96feec2f8133..9312b4afd18a 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot @@ -48,17 +48,13 @@ Wait Til Date Past ${sleepSeconds} = Subtract Date From Date ${date} ${latestDate} Run Keyword If ${sleepSeconds} > 0 Sleep ${sleepSeconds} -*** Variables *** -${ENDPOINT_URL} http://s3g:9878 -${BUCKET} generated - -*** Test Cases *** - Test Multipart Upload With Adjusted Length + [Arguments] ${BUCKET} Perform Multipart Upload ${BUCKET} multipart/adjusted_length_${PREFIX} /tmp/part1 /tmp/part2 Verify Multipart Upload ${BUCKET} multipart/adjusted_length_${PREFIX} /tmp/part1 /tmp/part2 Test Multipart Upload + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain ${result} ${BUCKET} @@ -87,6 +83,7 @@ Test Multipart Upload Test Multipart Upload Complete + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey1 --metadata="custom-key1=custom-value1,custom-key2=custom-value2,gdprEnabled=true" ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain ${result} ${BUCKET} @@ -139,12 +136,14 @@ Test Multipart Upload Complete Compare files /tmp/part2 /tmp/${PREFIX}-multipartKey1-part2.result Test Multipart Upload with user defined metadata size larger than 2 KB + [Arguments] ${BUCKET} ${custom_metadata_value} = Execute printf 'v%.0s' {1..3000} ${result} = Execute AWSS3APICli and checkrc create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/mpuWithLargeMetadata --metadata="custom-key1=${custom_metadata_value}" 255 Should contain ${result} MetadataTooLarge Should not contain ${result} 
custom-key1: ${custom_metadata_value} Test Multipart Upload Complete Entity too small + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey2 ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain ${result} ${BUCKET} @@ -168,6 +167,7 @@ Test Multipart Upload Complete Entity too small Test Multipart Upload Complete Invalid part errors and complete mpu with few parts + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey3 ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain ${result} ${BUCKET} @@ -219,6 +219,7 @@ Test Multipart Upload Complete Invalid part errors and complete mpu with few par Compare files /tmp/part3 /tmp/${PREFIX}-multipartKey3-part3.result Test abort Multipart upload + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey4 --storage-class REDUCED_REDUNDANCY ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain ${result} ${BUCKET} @@ -228,15 +229,18 @@ Test abort Multipart upload ${result} = Execute AWSS3APICli and checkrc abort-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey4 --upload-id ${uploadID} 0 Test abort Multipart upload with invalid uploadId + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc abort-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey5 --upload-id "random" 255 Upload part with Incorrect uploadID + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey Execute echo "Multipart upload" > /tmp/testfile ${result} = Execute AWSS3APICli and checkrc upload-part --bucket ${BUCKET} --key ${PREFIX}/multipartKey --part-number 1 --body /tmp/testfile --upload-id "random" 255 Should contain ${result} 
NoSuchUpload Test list parts + [Arguments] ${BUCKET} #initiate multipart upload ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey5 ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 @@ -279,6 +283,7 @@ Test list parts ${result} = Execute AWSS3APICli and checkrc abort-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/multipartKey5 --upload-id ${uploadID} 0 Test Multipart Upload with the simplified aws s3 cp API + [Arguments] ${BUCKET} Create Random file 22 Execute AWSS3Cli cp /tmp/part1 s3://${BUCKET}/mpyawscli Execute AWSS3Cli cp s3://${BUCKET}/mpyawscli /tmp/part1.result @@ -286,6 +291,7 @@ Test Multipart Upload with the simplified aws s3 cp API Compare files /tmp/part1 /tmp/part1.result Test Multipart Upload Put With Copy + [Arguments] ${BUCKET} Run Keyword Create Random file 5 ${result} = Execute AWSS3APICli put-object --bucket ${BUCKET} --key ${PREFIX}/copytest/source --body /tmp/part1 @@ -308,6 +314,7 @@ Test Multipart Upload Put With Copy Compare files /tmp/part1 /tmp/part-result Test Multipart Upload Put With Copy and range + [Arguments] ${BUCKET} Run Keyword Create Random file 10 ${result} = Execute AWSS3APICli put-object --bucket ${BUCKET} --key ${PREFIX}/copyrange/source --body /tmp/part1 @@ -335,6 +342,7 @@ Test Multipart Upload Put With Copy and range Compare files /tmp/part1 /tmp/part-result Test Multipart Upload Put With Copy and range with IfModifiedSince + [Arguments] ${BUCKET} Run Keyword Create Random file 10 ${curDate} = Get Current Date ${beforeCreate} = Subtract Time From Date ${curDate} 1 day @@ -388,6 +396,7 @@ Test Multipart Upload Put With Copy and range with IfModifiedSince Compare files /tmp/part1 /tmp/part-result Test Multipart Upload list + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/listtest/key1 ${uploadID1} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0 Should contain 
${result} ${BUCKET} @@ -406,3 +415,84 @@ Test Multipart Upload list ${count} = Execute and checkrc echo '${result}' | jq -r '.Uploads | length' 0 Should Be Equal ${count} 2 + +*** Variables *** +${ENDPOINT_URL} http://s3g:9878 +${BUCKET} generated +${BUCKET1} generated + +*** Test Cases *** +Test Multipart Upload With Adjusted Length with OBS + Test Multipart Upload With Adjusted Length ${BUCKET} +Test Multipart Upload With Adjusted Length with FSO + Test Multipart Upload With Adjusted Length ${BUCKET1} + +Test Multipart Upload with OBS + Test Multipart Upload ${BUCKET} +Test Multipart Upload with FSO + Test Multipart Upload ${BUCKET1} + +Test Multipart Upload Complete with OBS + Test Multipart Upload Complete ${BUCKET} +Test Multipart Upload Complete with FSO + Test Multipart Upload Complete ${BUCKET1} + +Test Multipart Upload with user defined metadata size larger than 2 KB with OBS + Test Multipart Upload with user defined metadata size larger than 2 KB ${BUCKET} +Test Multipart Upload with user defined metadata size larger than 2 KB with FSO + Test Multipart Upload with user defined metadata size larger than 2 KB ${BUCKET1} + +Test Multipart Upload Complete Entity too small with OBS + Test Multipart Upload Complete Entity too small ${BUCKET} +Test Multipart Upload Complete Entity too small with FSO + Test Multipart Upload Complete Entity too small ${BUCKET1} + +Test Multipart Upload Complete Invalid part errors and complete mpu with few parts with OBS + Test Multipart Upload Complete Invalid part errors and complete mpu with few parts ${BUCKET} +Test Multipart Upload Complete Invalid part errors and complete mpu with few parts with FSO + Test Multipart Upload Complete Invalid part errors and complete mpu with few parts ${BUCKET1} + +Test abort Multipart upload with OBS + Test abort Multipart upload ${BUCKET} +Test abort Multipart upload with FSO + Test abort Multipart upload ${BUCKET1} + +Test abort Multipart upload with invalid uploadId with OBS + Test abort 
Multipart upload with invalid uploadId ${BUCKET} +Test abort Multipart upload with invalid uploadId with FSO + Test abort Multipart upload with invalid uploadId ${BUCKET1} + +Upload part with Incorrect uploadID with OBS + Upload part with Incorrect uploadID ${BUCKET} +Upload part with Incorrect uploadID with FSO + Upload part with Incorrect uploadID ${BUCKET1} + +Test list parts with OBS + Test list parts ${BUCKET} +Test list parts with FSO + Test list parts ${BUCKET1} + +Test Multipart Upload with the simplified aws s3 cp API with OBS + Test Multipart Upload with the simplified aws s3 cp API ${BUCKET} +Test Multipart Upload with the simplified aws s3 cp API with FSO + Test Multipart Upload with the simplified aws s3 cp API ${BUCKET1} + +Test Multipart Upload Put With Copy with OBS + Test Multipart Upload Put With Copy ${BUCKET} +Test Multipart Upload Put With Copy with FSO + Test Multipart Upload Put With Copy ${BUCKET1} + +Test Multipart Upload Put With Copy and range with OBS + Test Multipart Upload Put With Copy and range ${BUCKET} +Test Multipart Upload Put With Copy and range with FSO + Test Multipart Upload Put With Copy and range ${BUCKET1} + +Test Multipart Upload Put With Copy and range with IfModifiedSince with OBS + Test Multipart Upload Put With Copy and range with IfModifiedSince ${BUCKET} +Test Multipart Upload Put With Copy and range with IfModifiedSince with FSO + Test Multipart Upload Put With Copy and range with IfModifiedSince ${BUCKET1} + +Test Multipart Upload list with OBS + Test Multipart Upload list ${BUCKET} +Test Multipart Upload list with FSO + Test Multipart Upload list ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot index be2e24b6e4c9..2da7606c6b28 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot @@ -25,10 +25,11 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} 
http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** - +*** Keywords *** File upload and directory list + [Arguments] ${BUCKET} Execute date > /tmp/testfile ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET} Should contain ${result} upload @@ -48,9 +49,21 @@ File upload and directory list Should contain ${result} file File upload with special chars + [Arguments] ${BUCKET} Execute date > /tmp/testfile ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/specialchars/a+b Should contain ${result} upload ${result} = Execute AWSS3Cli ls s3://${BUCKET}/specialchars/ Should not contain ${result} 'a b' Should contain ${result} a+b + +*** Test Cases *** +File upload and directory list with OBS + File upload and directory list ${BUCKET} +File upload and directory list with FSO + File upload and directory list ${BUCKET1} + +File upload with special chars with OBS + File upload with special chars ${BUCKET} +File upload with special chars with FSO + File upload with special chars ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/boto3.robot b/hadoop-ozone/dist/src/main/smoketest/s3/boto3.robot index 9fcb46acda78..00473240f6bd 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/boto3.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/boto3.robot @@ -27,9 +27,18 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated ${S3_SMOKETEST_DIR} /opt/hadoop/smoketest/s3 -*** Test Cases *** +*** Keywords *** Boto3 Client Test + [Arguments] ${BUCKET} ${result} = Execute python3 ${S3_SMOKETEST_DIR}/boto_client.py ${ENDPOINT_URL} ${BUCKET} + +*** Test Cases *** + +Boto3 Client Test with OBS + Boto3 Client Test ${BUCKET} +Boto3 Client Test with FSO + Boto3 Client Test ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot index 7b737773891a..9a3b93ae7d9f 100644 --- 
a/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot @@ -25,16 +25,23 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** +*** Keywords *** Head Bucket + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli head-bucket --bucket ${BUCKET} +*** Test Cases *** +Head Bucket with OBS + Head Bucket ${BUCKET} +Head Bucket with FSO + Head Bucket ${BUCKET1} + Head Bucket not existent [tags] no-bucket-type ${randStr} = Generate Ozone String ${result} = Execute AWSS3APICli and checkrc head-bucket --bucket ozonenosuchbucketqqweqwe-${randStr} 255 Should contain ${result} 404 Should contain ${result} Not Found - diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot index 1c60f4abe72f..1356456b0f8e 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot @@ -25,14 +25,17 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** +*** Keywords *** List buckets + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli list-buckets | jq -r '.Buckets[].Name' Should contain ${result} ${BUCKET} Get bucket info with Ozone Shell to check the owner field + [Arguments] ${BUCKET} Pass Execution If '${SECURITY_ENABLED}' == 'false' Skipping this check as security is not enabled ${result} = Execute ozone sh bucket info /s3v/${BUCKET} | jq -r '.owner' Should Be Equal ${result} testuser @@ -40,6 +43,18 @@ Get bucket info with Ozone Shell to check the owner field # in the way that getShortUserName() converts the accessId to "testuser". 
# Also see "Setup dummy credentials for S3" in commonawslib.robot +*** Test Cases *** + +List buckets with OBS + List buckets ${BUCKET} +List buckets with FSO + List buckets ${BUCKET1} + +Get bucket info with Ozone Shell to check the owner field with OBS + Get bucket info with Ozone Shell to check the owner field ${BUCKET} +Get bucket info with Ozone Shell to check the owner field with FSO + Get bucket info with Ozone Shell to check the owner field ${BUCKET1} + List buckets with empty access id [setup] Save AWS access key Execute aws configure set aws_access_key_id '' diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot index b20537014dd1..530acf414032 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot @@ -23,7 +23,9 @@ ${ENDPOINT_URL} http://s3g:9878 ${OZONE_S3_HEADER_VERSION} v4 ${OZONE_S3_SET_CREDENTIALS} true ${BUCKET} generated +${BUCKET1} generated ${BUCKET_LAYOUT} OBJECT_STORE +${BUCKET_LAYOUT1} FILE_SYSTEM_OPTIMIZED ${KEY_NAME} key1 ${OZONE_S3_TESTS_SET_UP} ${FALSE} ${OZONE_AWS_ACCESS_KEY_ID} ${EMPTY} @@ -141,7 +143,8 @@ Setup s3 tests Run Keyword Generate random prefix Run Keyword Install aws cli Run Keyword if '${OZONE_S3_SET_CREDENTIALS}' == 'true' Setup v4 headers - Run Keyword if '${BUCKET}' == 'generated' Create generated bucket ${BUCKET_LAYOUT} + Run Keyword if '${BUCKET}' == 'generated' Create generated bucket with OBS ${BUCKET_LAYOUT} + Run Keyword if '${BUCKET1}' == 'generated' Create generated bucket with FSO ${BUCKET_LAYOUT1} Run Keyword if '${BUCKET}' == 'link' Setup links for S3 tests Run Keyword if '${BUCKET}' == 'encrypted' Create encrypted bucket Run Keyword if '${BUCKET}' == 'erasure' Create EC bucket @@ -154,11 +157,16 @@ Setup links for S3 tests Execute ozone sh bucket create --layout ${BUCKET_LAYOUT} o3://${OM_SERVICE_ID}/legacy/source-bucket Create link link -Create 
generated bucket +Create generated bucket with OBS [Arguments] ${layout}=OBJECT_STORE ${BUCKET} = Create bucket with layout ${layout} Set Global Variable ${BUCKET} +Create generated bucket with FSO + [Arguments] ${layout}=FILE_SYSTEM_OPTIMIZED + ${BUCKET1} = Create bucket with layout ${layout} + Set Global Variable ${BUCKET1} + Create encrypted bucket Return From Keyword if '${SECURITY_ENABLED}' == 'false' ${exists} = Bucket Exists o3://${OM_SERVICE_ID}/s3v/encrypted diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/freon.robot b/hadoop-ozone/dist/src/main/smoketest/s3/freon.robot index f87a9ba1a77f..ba030c84f2ec 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/freon.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/freon.robot @@ -25,6 +25,7 @@ Default Tags no-bucket-type *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated *** Keywords *** # Export access key and secret to the environment @@ -40,6 +41,10 @@ Freon S3BG Should contain ${result} Successful executions: ${n} *** Test Cases *** -Run Freon S3BG +Run Freon S3BG with OBS [Setup] Setup aws credentials Freon S3BG s3bg-${BUCKET} + +Run Freon S3BG with FSO + [Setup] Setup aws credentials + Freon S3BG s3bg-${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectcopy.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectcopy.robot index e2bca772bcd9..6b1f5ad447e3 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objectcopy.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectcopy.robot @@ -25,6 +25,7 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated ${DESTBUCKET} generated1 *** Keywords *** @@ -33,8 +34,8 @@ Create Dest Bucket Set Suite Variable ${DESTBUCKET} destbucket-${postfix} Execute AWSS3APICli create-bucket --bucket ${DESTBUCKET} -*** Test Cases *** Copy Object Happy Scenario + [Arguments] ${BUCKET} Run Keyword if '${DESTBUCKET}' == 'generated1' Create Dest Bucket 
Execute date > /tmp/copyfile ${file_checksum} = Execute md5sum /tmp/copyfile | awk '{print $1}' @@ -77,12 +78,14 @@ Copy Object Happy Scenario Should Not contain ${result} \"custom-key2\": \"custom-value2\" Copy Object Where Bucket is not available + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc copy-object --bucket dfdfdfdfdfnonexistent --key ${PREFIX}/copyobject/key=value/f1 --copy-source ${BUCKET}/${PREFIX}/copyobject/key=value/f1 255 Should contain ${result} NoSuchBucket ${result} = Execute AWSS3APICli and checkrc copy-object --bucket ${DESTBUCKET} --key ${PREFIX}/copyobject/key=value/f1 --copy-source dfdfdfdfdfnonexistent/${PREFIX}/copyobject/key=value/f1 255 Should contain ${result} NoSuchBucket Copy Object Where both source and dest are same with change to storageclass + [Arguments] ${BUCKET} ${file_checksum} = Execute md5sum /tmp/copyfile | awk '{print $1}' ${result} = Execute AWSS3APICli copy-object --storage-class REDUCED_REDUNDANCY --bucket ${DESTBUCKET} --key ${PREFIX}/copyobject/key=value/f1 --copy-source ${DESTBUCKET}/${PREFIX}/copyobject/key=value/f1 Should contain ${result} ETag @@ -90,15 +93,49 @@ Copy Object Where both source and dest are same with change to storageclass Should Be Equal ${eTag} \"${file_checksum}\" Copy Object Where Key not available + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc copy-object --bucket ${DESTBUCKET} --key ${PREFIX}/copyobject/key=value/f1 --copy-source ${BUCKET}/nonnonexistentkey 255 Should contain ${result} NoSuchKey Copy Object using an invalid copy directive + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli and checkrc copy-object --bucket ${DESTBUCKET} --key ${PREFIX}/copyobject/key=value/f1 --copy-source ${BUCKET}/${PREFIX}/copyobject/key=value/f1 --metadata-directive INVALID 255 Should contain ${result} InvalidArgument Copy Object with user defined metadata size larger than 2 KB + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile2 
${custom_metadata_value} = Execute printf 'v%.0s' {1..3000} ${result} = Execute AWSS3ApiCli and checkrc copy-object --bucket ${DESTBUCKET} --key ${PREFIX}/copyobject/key=value/f1 --copy-source ${BUCKET}/${PREFIX}/copyobject/key=value/f1 --metadata="custom-key1=${custom_metadata_value}" --metadata-directive REPLACE 255 - Should contain ${result} MetadataTooLarge \ No newline at end of file + Should contain ${result} MetadataTooLarge + +*** Test Cases *** +Copy Object Happy Scenario with OBS + Copy Object Happy Scenario ${BUCKET} +Copy Object Happy Scenario with FSO + Copy Object Happy Scenario ${BUCKET1} + +Copy Object Where Bucket is not available with OBS + Copy Object Where Bucket is not available ${BUCKET} +Copy Object Where Bucket is not available with FSO + Copy Object Where Bucket is not available ${BUCKET1} + +Copy Object Where both source and dest are same with change to storageclass with OBS + Copy Object Where both source and dest are same with change to storageclass ${BUCKET} +Copy Object Where both source and dest are same with change to storageclass with FSO + Copy Object Where both source and dest are same with change to storageclass ${BUCKET1} + +Copy Object Where Key not available with OBS + Copy Object Where Key not available ${BUCKET} +Copy Object Where Key not available with FSO + Copy Object Where Key not available ${BUCKET1} + +Copy Object using an invalid copy directive with OBS + Copy Object using an invalid copy directive ${BUCKET} +Copy Object using an invalid copy directive with FSO + Copy Object using an invalid copy directive ${BUCKET1} + +Copy Object with user defined metadata size larger than 2 KB with OBS + Copy Object with user defined metadata size larger than 2 KB ${BUCKET} +Copy Object with user defined metadata size larger than 2 KB with FSO + Copy Object with user defined metadata size larger than 2 KB ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectcopys3a.robot 
b/hadoop-ozone/dist/src/main/smoketest/s3/objectcopys3a.robot index fead57ca31c9..a740a28ab225 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objectcopys3a.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectcopys3a.robot @@ -25,9 +25,11 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** +*** Keywords *** Put object s3a simulation + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile ${result} = Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/word.txt 255 ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix ${PREFIX}/word.txt/ @@ -46,4 +48,11 @@ Put object s3a simulation ${result} = Execute AWSS3ApiCli copy-object --bucket ${BUCKET} --key ${PREFIX}/word.txt --copy-source ${BUCKET}/${PREFIX}/word.txt._COPYING_ Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/word.txt 0 Execute AWSS3APICli delete-object --bucket ${BUCKET} --key ${PREFIX}/word.txt._COPYING_ - Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/word.txt._COPYING_ 255 \ No newline at end of file + Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/word.txt._COPYING_ 255 + +*** Test Cases *** +Put object s3a simulation with OBS + Put object s3a simulation ${BUCKET} + +Put object s3a simulation with FSO + Put object s3a simulation ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot index 73bf3dae6d91..5fde193c3181 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot @@ -25,9 +25,11 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** +*** Keywords *** Delete file with s3api + [Arguments] ${BUCKET} 
Execute date > /tmp/testfile ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/deletetestapi/key=value/f1 --body /tmp/testfile ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix ${PREFIX}/deletetestapi/key=value/ @@ -37,6 +39,7 @@ Delete file with s3api Should not contain ${result} "${PREFIX}/deletetestapi/key=value/f1" Delete file with s3api, file doesn't exist + [Arguments] ${BUCKET} ${result} = Execute AWSS3Cli ls s3://${BUCKET}/ Should not contain ${result} thereisnosuchfile ${result} = Execute AWSS3APICli delete-object --bucket ${BUCKET} --key thereisnosuchfile @@ -44,6 +47,7 @@ Delete file with s3api, file doesn't exist Should not contain ${result} thereisnosuchfile Delete dir with s3api + [Arguments] ${BUCKET} Execute date > /tmp/testfile ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/${PREFIX}/deletetestapidir/key=value/f1 ${result} = Execute AWSS3Cli ls s3://${BUCKET}/${PREFIX}/deletetestapidir/key=value/ @@ -55,6 +59,7 @@ Delete dir with s3api Delete file with s3api, file doesn't exist, prefix of a real file + [Arguments] ${BUCKET} Execute date > /tmp/testfile ${result} = Execute AWSS3Cli cp /tmp/testfile s3://${BUCKET}/${PREFIX}/deletetestapiprefix/key=value/filefile ${result} = Execute AWSS3Cli ls s3://${BUCKET}/${PREFIX}/deletetestapiprefix/key=value/ @@ -67,5 +72,33 @@ Delete file with s3api, file doesn't exist, prefix of a real file Delete file with s3api, bucket doesn't exist + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc delete-object --bucket ${BUCKET}-nosuchbucket --key f1 255 Should contain ${result} NoSuchBucket + +*** Test Cases *** +Delete file with s3api with OBS + Delete file with s3api ${BUCKET} +Delete file with s3api with FSO + Delete file with s3api ${BUCKET1} + +Delete file with s3api, file doesn't exist with OBS + Delete file with s3api, file doesn't exist ${BUCKET} +Delete file with s3api, file doesn't exist with FSO + Delete file with s3api, file 
doesn't exist ${BUCKET1} + +Delete dir with s3api with OBS + Delete dir with s3api ${BUCKET} +Delete dir with s3api with FSO + Delete dir with s3api ${BUCKET1} + +Delete file with s3api, file doesn't exist, prefix of a real file with OBS + Delete file with s3api, file doesn't exist, prefix of a real file ${BUCKET} + +Delete file with s3api, file doesn't exist, prefix of a real file with FSO + Delete file with s3api, file doesn't exist, prefix of a real file ${BUCKET1} + +Delete file with s3api, bucket doesn't exist with OBS + Delete file with s3api, bucket doesn't exist ${BUCKET} +Delete file with s3api, bucket doesn't exist with FSO + Delete file with s3api, bucket doesn't exist ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objecthead.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objecthead.robot index 66f3461b01dd..286054f6f76d 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objecthead.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objecthead.robot @@ -26,10 +26,12 @@ Suite Setup Setup s3 tests ${ENDPOINT_URL} http://s3g:9878 ${OZONE_TEST} true ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** +*** Keywords *** Head existing object + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile ${result} = Execute AWSS3APICli and checkrc put-object --bucket ${BUCKET} --key ${PREFIX}/headobject/key=value/f1 --body /tmp/testfile 0 @@ -37,10 +39,29 @@ Head existing object ${result} = Execute AWSS3APICli and checkrc delete-object --bucket ${BUCKET} --key ${PREFIX}/headobject/key=value/f1 0 Head object in non existing bucket + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET}-non-existent --key ${PREFIX}/headobject/key=value/f1 255 Should contain ${result} 404 Should contain ${result} Not Found +Head non existing key + [Arguments] ${BUCKET} + ${result} = Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/non-existent 255 + Should contain ${result} 404 
+ Should contain ${result} Not Found + +*** Test Cases *** + +Head existing object with OBS + Head existing object ${BUCKET} +Head existing object with FSO + Head existing object ${BUCKET1} + +Head object in non existing bucket with OBS + Head object in non existing bucket ${BUCKET} +Head object in non existing bucket with FSO + Head object in non existing bucket ${BUCKET1} + Head object where path is a directory Pass Execution If '${BUCKET_LAYOUT}' == 'FILE_SYSTEM_OPTIMIZED' does not apply to FSO buckets ${result} = Execute AWSS3APICli and checkrc put-object --bucket ${BUCKET} --key ${PREFIX}/headobject/keyvalue/f1 --body /tmp/testfile 0 @@ -56,7 +77,7 @@ Head directory objects Should contain ${result} Not Found ${result} = Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/mydir/ 0 -Head non existing key - ${result} = Execute AWSS3APICli and checkrc head-object --bucket ${BUCKET} --key ${PREFIX}/non-existent 255 - Should contain ${result} 404 - Should contain ${result} Not Found +Head non existing key with OBS + Head non existing key ${BUCKET} +Head non existing key with FSO + Head non existing key ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectmultidelete.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectmultidelete.robot index 37dc2d106bd7..9b240348d7f1 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objectmultidelete.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectmultidelete.robot @@ -25,10 +25,11 @@ Suite Setup Setup s3 tests *** Variables *** ${ENDPOINT_URL} http://s3g:9878 ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** - +*** Keywords *** Delete file with multi delete + [Arguments] ${BUCKET} Execute date > /tmp/testfile ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/multidelete/key=value/f1 --body /tmp/testfile ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/multidelete/key=value/f2 --body /tmp/testfile @@ -47,3 
+48,11 @@ Delete file with multi delete Should contain ${result} ${PREFIX}/multidelete/key=value/f3 Should contain ${result} STANDARD Should not contain ${result} REDUCED_REDUNDANCY + +*** Test Cases *** + +Delete file with multi delete with OBS + Delete file with multi delete ${BUCKET} + +Delete file with multi delete with FSO + Delete file with multi delete ${BUCKET1} diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot index bbff89e71f83..fc945d8b1d30 100644 --- a/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot +++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot @@ -26,10 +26,11 @@ Suite Setup Setup s3 tests ${ENDPOINT_URL} http://s3g:9878 ${OZONE_TEST} true ${BUCKET} generated +${BUCKET1} generated -*** Test Cases *** - +*** Keywords *** Put object to s3 + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile ${result} = Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --body /tmp/testfile ${result} = Execute AWSS3ApiCli list-objects --bucket ${BUCKET} --prefix ${PREFIX}/putobject/key=value/ @@ -42,6 +43,7 @@ Put object to s3 #This test depends on the previous test case. Can't be executes alone Get object from s3 + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 /tmp/testfile.result Compare files /tmp/testfile /tmp/testfile.result ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/zerobyte /tmp/zerobyte.result @@ -49,11 +51,13 @@ Get object from s3 #This test depends on the previous test case. 
Can't be executed alone Get object with wrong signature + [Arguments] ${BUCKET} Pass Execution If '${SECURITY_ENABLED}' == 'false' Skip in unsecure cluster ${result} = Execute and Ignore Error curl -i -H 'Authorization: AWS scm/scm@EXAMPLE.COM:asdfqwerty' ${ENDPOINT_URL}/${BUCKET}/${PREFIX}/putobject/key=value/f1 Should contain ${result} 403 Forbidden Get Partial object from s3 with both start and endoffset + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=0-4 /tmp/testfile1.result Should contain ${result} ContentRange Should contain ${result} bytes 0-4/11 @@ -80,11 +84,13 @@ Get Partial object from s3 with both start and endoffset Should Be Equal ${expectedData} ${actualData} Get Partial object from s3 with both start and endoffset(start offset and endoffset is greater than file size) + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=10000-10000 /tmp/testfile2.result 255 Should contain ${result} InvalidRange Get Partial object from s3 with both start and endoffset(end offset is greater than file size) + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=0-10000 /tmp/testfile2.result Should contain ${result} ContentRange Should contain ${result} bytes 0-10/11 @@ -94,6 +100,7 @@ Get Partial object from s3 with both start and endoffset(end offset is greater t Should Be Equal ${expectedData} ${actualData} Get Partial object from s3 with only start offset + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=0- /tmp/testfile3.result Should contain ${result} ContentRange Should contain ${result} bytes 0-10/11 @@ -103,6 +110,7 @@ Get Partial object from s3 with only start offset Should Be Equal ${expectedData} 
${actualData} Get Partial object from s3 with both start and endoffset which are equal + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=0-0 /tmp/testfile4.result Should contain ${result} ContentRange Should contain ${result} bytes 0-0/11 @@ -120,6 +128,7 @@ Get Partial object from s3 with both start and endoffset which are equal Should Be Equal ${expectedData} ${actualData} Get Partial object from s3 to get last n bytes + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=-4 /tmp/testfile6.result Should contain ${result} ContentRange Should contain ${result} bytes 7-10/11 @@ -138,6 +147,7 @@ Get Partial object from s3 to get last n bytes Should Be Equal ${expectedData} ${actualData} Incorrect values for end and start offset + [Arguments] ${BUCKET} ${result} = Execute AWSS3ApiCli get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/f1 --range bytes=-11-10000 /tmp/testfile8.result Should not contain ${result} ContentRange Should contain ${result} AcceptRanges @@ -153,6 +163,7 @@ Incorrect values for end and start offset Should Be Equal ${expectedData} ${actualData} Zero byte file + [Arguments] ${BUCKET} ${result} = Execute AWSS3APICli and checkrc get-object --bucket ${BUCKET} --key ${PREFIX}/putobject/key=value/zerobyte --range bytes=0-0 /tmp/testfile2.result 255 Should contain ${result} InvalidRange @@ -163,6 +174,7 @@ Zero byte file Should contain ${result} InvalidRange Create file with user defined metadata + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile2 Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/putobject/custom-metadata/key1 --body /tmp/testfile2 --metadata="custom-key1=custom-value1,custom-key2=custom-value2" @@ -175,6 +187,7 @@ Create file with user defined metadata Should contain ${result} \"custom-key2\" : \"custom-value2\" Create 
file with user defined metadata with gdpr enabled value in request + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile2 Execute AWSS3ApiCli put-object --bucket ${BUCKET} --key ${PREFIX}/putobject/custom-metadata/key2 --body /tmp/testfile2 --metadata="gdprEnabled=true,custom-key2=custom-value2" ${result} = Execute AWSS3ApiCli head-object --bucket ${BUCKET} --key ${PREFIX}/putobject/custom-metadata/key2 @@ -183,6 +196,7 @@ Create file with user defined metadata with gdpr enabled value in request Create file with user defined metadata size larger than 2 KB + [Arguments] ${BUCKET} Execute echo "Randomtext" > /tmp/testfile2 ${custom_metadata_value} = Execute printf 'v%.0s' {1..3000} ${result} = Execute AWSS3APICli and checkrc put-object --bucket ${BUCKET} --key ${PREFIX}/putobject/custom-metadata/key2 --body /tmp/testfile2 --metadata="custom-key1=${custom_metadata_value}" 255 @@ -190,6 +204,7 @@ Create file with user defined metadata size larger than 2 KB Should not contain ${result} custom-key1: ${custom_metadata_value} Create small file and expect ETag (MD5) in a reponse header + [Arguments] ${BUCKET} Execute head -c 1MB /tmp/small_file ${file_md5_checksum} = Execute md5sum /tmp/small_file | awk '{print $1}' ${result} = Execute AWSS3CliDebug cp /tmp/small_file s3://${BUCKET} @@ -197,6 +212,7 @@ Create small file and expect ETag (MD5) in a reponse header # The next two test cases depends on the previous one Download small file end expect ETag (MD5) in a response header + [Arguments] ${BUCKET} ${file_md5_checksum} = Execute md5sum /tmp/small_file | awk '{print $1}' ${result} = Execute AWSS3CliDebug cp s3://${BUCKET}/small_file /tmp/small_file_downloaded Should Match Regexp ${result} (?is)HEAD /${BUCKET}/small_file.*?Response headers.*?ETag': '"${file_md5_checksum}"' @@ -206,6 +222,7 @@ Download small file end expect ETag (MD5) in a response header Execute rm /tmp/small_file_downloaded Create key with custom etag metadata and expect it won't conflict with 
ETag response header of HEAD request + [Arguments] ${BUCKET} ${file_md5_checksum} Execute md5sum /tmp/small_file | awk '{print $1}' Execute AWSS3CliDebug cp --metadata "ETag=custom-etag-value" /tmp/small_file s3://${BUCKET}/test_file ${result} Execute AWSS3CliDebug cp s3://${BUCKET}/test_file /tmp/test_file_downloaded @@ -219,6 +236,7 @@ Create key with custom etag metadata and expect it won't conflict with ETag resp Execute rm -rf /tmp/test_file_downloaded Create&Download big file by multipart upload and expect ETag in a file download response + [Arguments] ${BUCKET} Execute head -c 10MB /tmp/big_file ${result} Execute AWSS3CliDebug cp /tmp/big_file s3://${BUCKET}/ ${match} ${etag1} Should Match Regexp ${result} (?is)POST /${BUCKET}/big_file\\?uploadId=.*?Response body.*?ETag>"(.*?-2)" @@ -230,6 +248,7 @@ Create&Download big file by multipart upload and expect ETag in a file download Execute rm -rf /tmp/big_file Create key twice with different content and expect different ETags + [Arguments] ${BUCKET} Execute head -c 1MiB /tmp/file1 Execute head -c 1MiB /tmp/file2 ${file1UploadResult} Execute AWSS3CliDebug cp /tmp/file1 s3://${BUCKET}/test_key_to_check_etag_differences @@ -241,3 +260,98 @@ Create key twice with different content and expect different ETags Execute AWSS3Cli rm s3://${BUCKET}/test_key_to_check_etag_differences Execute rm -rf /tmp/file1 Execute rm -rf /tmp/file2 + +*** Test Cases *** +Put object to s3 with OBS + Put object to s3 ${BUCKET} +Put object to s3 with FSO + Put object to s3 ${BUCKET1} + +Get object from s3 with OBS + Get object from s3 ${BUCKET} +Get object from s3 with FSO + Get object from s3 ${BUCKET1} + +Get object with wrong signature with OBS + Get object with wrong signature ${BUCKET} +Get object with wrong signature with FSO + Get object with wrong signature ${BUCKET1} + +Get Partial object from s3 with both start and endoffset with OBS + Get Partial object from s3 with both start and endoffset ${BUCKET} +Get Partial object from s3 
with both start and endoffset with FSO + Get Partial object from s3 with both start and endoffset ${BUCKET1} + +Get Partial object from s3 with both start and endoffset(start offset and endoffset is greater than file size) with OBS + Get Partial object from s3 with both start and endoffset(start offset and endoffset is greater than file size) ${BUCKET} +Get Partial object from s3 with both start and endoffset(start offset and endoffset is greater than file size) with FSO + Get Partial object from s3 with both start and endoffset(start offset and endoffset is greater than file size) ${BUCKET1} + +Get Partial object from s3 with both start and endoffset(end offset is greater than file size) with OBS + Get Partial object from s3 with both start and endoffset(end offset is greater than file size) ${BUCKET} +Get Partial object from s3 with both start and endoffset(end offset is greater than file size) with FSO + Get Partial object from s3 with both start and endoffset(end offset is greater than file size) ${BUCKET1} + +Get Partial object from s3 with only start offset with OBS + Get Partial object from s3 with only start offset ${BUCKET} +Get Partial object from s3 with only start offset with FSO + Get Partial object from s3 with only start offset ${BUCKET1} + +Get Partial object from s3 with both start and endoffset which are equal with OBS + Get Partial object from s3 with both start and endoffset which are equal ${BUCKET} +Get Partial object from s3 with both start and endoffset which are equal with FSO + Get Partial object from s3 with both start and endoffset which are equal ${BUCKET1} + +Get Partial object from s3 to get last n bytes with OBS + Get Partial object from s3 to get last n bytes ${BUCKET} +Get Partial object from s3 to get last n bytes with FSO + Get Partial object from s3 to get last n bytes ${BUCKET1} + +Incorrect values for end and start offset with OBS + Incorrect values for end and start offset ${BUCKET} +Incorrect values for end and start offset 
with FSO + Incorrect values for end and start offset ${BUCKET1} + +Zero byte file with OBS + Zero byte file ${BUCKET} +Zero byte file with FSO + Zero byte file ${BUCKET1} + +Create file with user defined metadata with OBS + Create file with user defined metadata ${BUCKET} +Create file with user defined metadata with FSO + Create file with user defined metadata ${BUCKET1} + +Create file with user defined metadata with gdpr enabled value in request with OBS + Create file with user defined metadata with gdpr enabled value in request ${BUCKET} +Create file with user defined metadata with gdpr enabled value in request with FSO + Create file with user defined metadata with gdpr enabled value in request ${BUCKET1} + +Create file with user defined metadata size larger than 2 KB with OBS + Create file with user defined metadata size larger than 2 KB ${BUCKET} +Create file with user defined metadata size larger than 2 KB with FSO + Create file with user defined metadata size larger than 2 KB ${BUCKET1} + +Create small file and expect ETag (MD5) in a reponse header with OBS + Create small file and expect ETag (MD5) in a reponse header ${BUCKET} +Download small file end expect ETag (MD5) in a response header with OBS + Download small file end expect ETag (MD5) in a response header ${BUCKET} +Create key with custom etag metadata and expect it won't conflict with ETag response header of HEAD request with OBS + Create key with custom etag metadata and expect it won't conflict with ETag response header of HEAD request ${BUCKET} + +Create small file and expect ETag (MD5) in a reponse header with FSO + Create small file and expect ETag (MD5) in a reponse header ${BUCKET1} +Download small file end expect ETag (MD5) in a response header with FSO + Download small file end expect ETag (MD5) in a response header ${BUCKET1} +Create key with custom etag metadata and expect it won't conflict with ETag response header of HEAD request with FSO + Create key with custom etag metadata and expect 
it won't conflict with ETag response header of HEAD request ${BUCKET1} + +Create&Download big file by multipart upload and expect ETag in a file download response with OBS + Create&Download big file by multipart upload and expect ETag in a file download response ${BUCKET} +Create&Download big file by multipart upload and expect ETag in a file download response with FSO + Create&Download big file by multipart upload and expect ETag in a file download response ${BUCKET1} + +Create key twice with different content and expect different ETags with OBS + Create key twice with different content and expect different ETags ${BUCKET} +Create key twice with different content and expect different ETags with FSO + Create key twice with different content and expect different ETags ${BUCKET1}