path: root/lib/active_storage
author David Heinemeier Hansson <david@loudthinking.com> 2017-07-12 08:44:08 +0200
committer GitHub <noreply@github.com> 2017-07-12 08:44:08 +0200
commit 17906fd22f5c6bbb56f10ee3221a62569fb0d5c6 (patch)
tree aa9cd3c2ce487ad248ae1f276e1d251c0ba07e19 /lib/active_storage
parent 6d3962461fb8d35fc9538d685fee96267663acf2 (diff)
Revert "S3: slim down service implementation (#40)" (#41)
This reverts commit 6d3962461fb8d35fc9538d685fee96267663acf2.
Diffstat (limited to 'lib/active_storage')
-rw-r--r--  lib/active_storage/service/s3_service.rb | 60
1 file changed, 36 insertions(+), 24 deletions(-)
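For context, the revert restores the constructor that takes AWS credentials and a region directly and builds its own Aws::S3::Resource, replacing the slimmed-down variant that took a bucket name plus an Aws::S3::Client (or client options). A minimal usage sketch of the restored interface follows; it assumes the early activestorage gem and aws-sdk are loaded, and the credentials, region, and bucket name are illustrative, not part of the commit:

    require "stringio"

    # Restored constructor: credentials and region go straight to the service,
    # which wraps an Aws::S3::Resource and a bucket handle (illustrative values).
    service = ActiveStorage::Service::S3Service.new(
      access_key_id:     ENV["AWS_ACCESS_KEY_ID"],
      secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
      region:            "us-east-1",
      bucket:            "example-bucket"
    )

    service.upload("avatars/1", StringIO.new("hello"))
    service.exist?("avatars/1")   # => true once the upload succeeds

Under the reverted-away version (parent commit 6d39624), the equivalent construction would have been S3Service.new(bucket: "example-bucket", region: "us-east-1").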
diff --git a/lib/active_storage/service/s3_service.rb b/lib/active_storage/service/s3_service.rb
index ad55db0dc0..c3b6688bb9 100644
--- a/lib/active_storage/service/s3_service.rb
+++ b/lib/active_storage/service/s3_service.rb
@@ -4,72 +4,84 @@ require "active_support/core_ext/numeric/bytes"
class ActiveStorage::Service::S3Service < ActiveStorage::Service
attr_reader :client, :bucket
- def initialize(bucket:, client: nil, **client_options)
- @bucket = bucket
- @client = client || Aws::S3::Client.new(client_options)
+ def initialize(access_key_id:, secret_access_key:, region:, bucket:)
+ @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region)
+ @bucket = @client.bucket(bucket)
end
def upload(key, io, checksum: nil)
instrument :upload, key, checksum: checksum do
begin
- client.put_object bucket: bucket, key: key, body: io, content_md5: checksum
+ object_for(key).put(body: io, content_md5: checksum)
rescue Aws::S3::Errors::BadDigest
raise ActiveStorage::IntegrityError
end
end
end
- def download(key, &block)
+ def download(key)
if block_given?
instrument :streaming_download, key do
- client.get_object bucket: bucket, key: key, &block
+ stream(key, &block)
end
else
instrument :download, key do
- "".b.tap do |data|
- client.get_object bucket: bucket, key: key, response_target: data
- end
+ object_for(key).get.body.read.force_encoding(Encoding::BINARY)
end
end
end
def delete(key)
instrument :delete, key do
- client.delete_object bucket: bucket, key: key
+ object_for(key).delete
end
end
def exist?(key)
instrument :exist, key do |payload|
- payload[:exist] =
- begin
- client.head_object bucket: bucket, key: key
- rescue Aws::S3::Errors::NoSuchKey
- false
- else
- true
- end
+ answer = object_for(key).exists?
+ payload[:exist] = answer
+ answer
end
end
def url(key, expires_in:, disposition:, filename:)
instrument :url, key do |payload|
- payload[:url] = presigner.presigned_url :get_object,
- bucket: bucket, key: key, expires_in: expires_in,
+ generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
response_content_disposition: "#{disposition}; filename=\"#{filename}\""
+
+ payload[:url] = generated_url
+
+ generated_url
end
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:)
instrument :url, key do |payload|
- payload[:url] = presigner.presigned_url :put_object,
- bucket: bucket, key: key, expires_in: expires_in,
+ generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
content_type: content_type, content_length: content_length
+
+ payload[:url] = generated_url
+
+ generated_url
end
end
private
- def presigner
- @presigner ||= Aws::S3::Presigner.new client: client
+ def object_for(key)
+ bucket.object(key)
+ end
+
+ # Reads the object for the given key in chunks, yielding each to the block.
+ def stream(key, options = {}, &block)
+ object = object_for(key)
+
+ chunk_size = 5.megabytes
+ offset = 0
+
+ while offset < object.content_length
+ yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
+ offset += chunk_size
+ end
end
end
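The restored private stream helper reads the object in 5 MB ranged GET requests instead of buffering the whole body at once. As a self-contained illustration of that ranged-read technique, here is a sketch against aws-sdk-s3 directly, using Aws::S3::Object#get with a byte range; the region, bucket, and key are illustrative assumptions, not values from the commit:

    require "aws-sdk-s3"

    # Fetch a large S3 object in fixed-size chunks, mirroring the 5.megabytes
    # chunking in the stream helper above (illustrative bucket/key).
    object = Aws::S3::Resource.new(region: "us-east-1")
               .bucket("example-bucket")
               .object("backups/huge.dump")

    chunk_size = 5 * 1024 * 1024          # 5 MB per request
    offset     = 0
    total      = object.content_length    # one HEAD request

    while offset < total
      range = "bytes=#{offset}-#{offset + chunk_size - 1}"
      chunk = object.get(range: range).body.read
      # ...hand the chunk to the caller, e.g. yield chunk...
      offset += chunk_size
    end

Bounding each read to chunk_size keeps memory flat for arbitrarily large blobs, at the cost of one GET per 5 MB of data.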