-
Notifications
You must be signed in to change notification settings - Fork 1.2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
For example: source = Aws::S3::Object.new('source-bucket', 'source-key'); source.copy_to(bucket: 'target-bucket', key: 'target-key', multipart_copy: true)
- Loading branch information
1 parent
b684ad3
commit 8eb8083
Showing
13 changed files
with
571 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
# language: en
@s3 @resources @multipart-copy
Feature: Managed multipart copies

  Background:
    Given I create a bucket

  # NOTE(review): assumes cfg["s3"]["large_object"] points at a pre-existing
  # object large enough for multipart copy (>= 5MB) — confirm in test config.
  @slow
  Scenario: Copy-to across buckets
    Given a "source_bucket" is set in cfg["s3"]["large_object"]["bucket"]
    And a "source_key" is set in cfg["s3"]["large_object"]["key"]
    Then I should be able to multipart copy the object to a different bucket
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
61 changes: 61 additions & 0 deletions
61
aws-sdk-resources/lib/aws-sdk-resources/services/s3/object_copier.rb
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
require 'thread'

module Aws
  module S3
    # @api private
    #
    # Copies S3 objects to and from a wrapped {S3::Object}, dispatching
    # to either a single CopyObject request or a managed multipart copy
    # based on the `:multipart_copy` option.
    class ObjectCopier

      # @param [S3::Object] object The object copies originate from or
      #   target (depending on whether {#copy_from} or {#copy_to} is used).
      # @param [Hash] options Options forwarded to {ObjectMultipartCopier},
      #   merged with `client: object.client`.
      def initialize(object, options = {})
        @object = object
        @options = options.merge(client: @object.client)
      end

      # Copies `source` into the wrapped object.
      # @param [S3::Object, Hash, String] source
      # @param [Hash] options Additional options for the copy request.
      def copy_from(source, options = {})
        copy_object(source, @object, options)
      end

      # Copies the wrapped object to `target`.
      # @param [S3::Object, Hash, String] target
      # @option options [Boolean] :multipart_copy (false) When `true`, the
      #   copy is performed as a managed multipart copy.
      def copy_to(target, options = {})
        copy_object(@object, target, options)
      end

      private

      def copy_object(source, target, options)
        # Duplicate so the caller's options hash is not mutated by the
        # key assignments and the :multipart_copy delete below.
        options = options.dup
        target_bucket, target_key = copy_target(target)
        options[:bucket] = target_bucket
        options[:key] = target_key
        options[:copy_source] = copy_source(source)
        if options.delete(:multipart_copy)
          ObjectMultipartCopier.new(@options).copy(options)
        else
          @object.client.copy_object(options)
        end
      end

      # Formats `source` as a "bucket/key" copy source string.
      # @raise [ArgumentError] when `source` is not a supported type.
      def copy_source(source)
        case source
        when String then source
        when Hash then "#{source[:bucket]}/#{source[:key]}"
        when S3::Object then "#{source.bucket_name}/#{source.key}"
        else
          msg = "expected source to be an Aws::S3::Object, Hash, or String"
          raise ArgumentError, msg
        end
      end

      # Extracts `[bucket, key]` from `target`.
      # @raise [ArgumentError] when `target` is not a supported type.
      def copy_target(target)
        case target
        when String then target.match(/([^\/]+?)\/(.+)/)[1,2]
        when Hash then target.values_at(:bucket, :key)
        when S3::Object then [target.bucket_name, target.key]
        else
          msg = "expected target to be an Aws::S3::Object, Hash, or String"
          raise ArgumentError, msg
        end
      end

    end
  end
end
171 changes: 171 additions & 0 deletions
171
aws-sdk-resources/lib/aws-sdk-resources/services/s3/object_multipart_copier.rb
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,171 @@ | ||
require 'thread'

module Aws
  module S3
    # @api private
    #
    # Copies a (>= 5MB) S3 object using the multipart upload APIs:
    # parts are copied concurrently with UploadPartCopy, then the
    # upload is completed (or aborted on failure).
    class ObjectMultipartCopier

      FIVE_MB = 5 * 1024 * 1024 # 5MB

      FILE_TOO_SMALL = "unable to multipart copy files smaller than 5MB"

      # Hard S3 limit on the number of parts in a multipart upload.
      MAX_PARTS = 10_000

      # @option options [Client] :client (Client.new) Client used for all
      #   API calls.
      # @option options [Integer] :min_part_size (52428800) Minimum size of
      #   copied parts, in bytes. Defaults to 50MB.
      # @option options [Integer] :thread_count (10) Number of concurrent
      #   threads to use for copying parts.
      def initialize(options = {})
        @thread_count = options.delete(:thread_count) || 10
        @min_part_size = options.delete(:min_part_size) || (FIVE_MB * 10)
        @client = options[:client] || Client.new
      end

      # @return [Client]
      attr_reader :client

      # Performs the managed multipart copy.
      # @option (see S3::Client#copy_object)
      # @option options [Integer] :content_length Size of the source object
      #   in bytes; when omitted, a HeadObject request is made to fetch it.
      def copy(options = {})
        # Duplicate so the caller's options hash is not mutated (the
        # :upload_id assignment and :content_length delete below).
        options = options.dup
        size = source_size(options)
        options[:upload_id] = initiate_upload(options)
        begin
          parts = copy_parts(size, default_part_size(size), options)
          complete_upload(parts, options)
        rescue => error
          # Abort so the partially-copied parts do not accrue storage costs.
          abort_upload(options)
          raise error
        end
      end

      private

      def initiate_upload(options)
        options = options_for(:create_multipart_upload, options)
        @client.create_multipart_upload(options).upload_id
      end

      # Copies all parts across @thread_count threads and returns the
      # completed parts sorted by part number (as CompleteMultipartUpload
      # requires). Thread#value re-raises any worker error.
      def copy_parts(size, default_part_size, options)
        queue = PartQueue.new(compute_parts(size, default_part_size, options))
        threads = []
        @thread_count.times do
          threads << copy_part_thread(queue)
        end
        threads.map(&:value).flatten.sort_by { |part| part[:part_number] }
      end

      def copy_part_thread(queue)
        Thread.new do
          begin
            completed = []
            while part = queue.shift
              completed << copy_part(part)
            end
            completed
          rescue => error
            # Drain the queue so sibling threads stop picking up work,
            # then propagate the failure to the joining thread.
            queue.clear!
            raise error
          end
        end
      end

      def copy_part(part)
        {
          etag: @client.upload_part_copy(part).copy_part_result.etag,
          part_number: part[:part_number],
        }
      end

      def complete_upload(parts, options)
        options = options_for(:complete_multipart_upload, options)
        options[:multipart_upload] = { parts: parts }
        @client.complete_multipart_upload(options)
      end

      def abort_upload(options)
        @client.abort_multipart_upload({
          bucket: options[:bucket],
          key: options[:key],
          upload_id: options[:upload_id],
        })
      end

      # Builds one UploadPartCopy request hash per part, each covering
      # `default_part_size` bytes (the final part takes the remainder).
      def compute_parts(size, default_part_size, options)
        part_number = 1
        offset = 0
        parts = []
        options = options_for(:upload_part_copy, options)
        while offset < size
          parts << options.merge({
            part_number: part_number,
            copy_source_range: byte_range(offset, default_part_size, size),
          })
          part_number += 1
          offset += default_part_size
        end
        parts
      end

      # HTTP-style byte range for one part; both bounds are inclusive.
      def byte_range(offset, default_part_size, size)
        if offset + default_part_size < size
          "bytes=#{offset}-#{offset + default_part_size - 1}"
        else
          "bytes=#{offset}-#{size - 1}"
        end
      end

      # Returns the source object size, preferring the caller-supplied
      # :content_length (removed so it is not sent with API requests)
      # over a HeadObject lookup.
      def source_size(options)
        if options[:content_length]
          options.delete(:content_length)
        else
          bucket, key = options[:copy_source].match(/([^\/]+?)\/(.+)/)[1,2]
          @client.head_object(bucket: bucket, key: key).content_length
        end
      end

      # Chooses a part size that keeps the part count within MAX_PARTS
      # while honoring @min_part_size.
      # @raise [ArgumentError] when the source is smaller than 5MB.
      def default_part_size(source_size)
        if source_size < FIVE_MB
          raise ArgumentError, FILE_TOO_SMALL
        else
          [(source_size.to_f / MAX_PARTS).ceil, @min_part_size].max.to_i
        end
      end

      # Filters `options` down to the members accepted by the given API
      # operation's request shape.
      def options_for(operation_name, options)
        API_OPTIONS[operation_name].inject({}) do |hash, opt_name|
          hash[opt_name] = options[opt_name] if options.key?(opt_name)
          hash
        end
      end

      # @api private
      # Class-level helper, evaluated once below to snapshot each request
      # shape's member names from the client API model.
      def self.options_for(shape_name)
        Client.api.metadata['shapes'][shape_name].member_names
      end

      API_OPTIONS = {
        create_multipart_upload: options_for('CreateMultipartUploadRequest'),
        upload_part_copy: options_for('UploadPartCopyRequest'),
        complete_multipart_upload: options_for('CompleteMultipartUploadRequest'),
      }

      # Thread-safe queue of pending part-copy requests.
      class PartQueue

        def initialize(parts = [])
          @parts = parts
          @mutex = Mutex.new
        end

        def shift
          @mutex.synchronize { @parts.shift }
        end

        def clear!
          @mutex.synchronize { @parts.clear }
        end

      end
    end
  end
end
Oops, something went wrong.