about summary refs log tree commit diff stats
path: root/activestorage/lib/active_storage/service/s3_service.rb
diff options
context:
space:
mode:
Diffstat (limited to 'activestorage/lib/active_storage/service/s3_service.rb')
-rw-r--r--  activestorage/lib/active_storage/service/s3_service.rb  134
1 files changed, 68 insertions, 66 deletions
diff --git a/activestorage/lib/active_storage/service/s3_service.rb b/activestorage/lib/active_storage/service/s3_service.rb
index ca461c2994..5153f5db0d 100644
--- a/activestorage/lib/active_storage/service/s3_service.rb
+++ b/activestorage/lib/active_storage/service/s3_service.rb
@@ -1,96 +1,98 @@
require "aws-sdk"
require "active_support/core_ext/numeric/bytes"
-# Wraps the Amazon Simple Storage Service (S3) as a Active Storage service.
-# See `ActiveStorage::Service` for the generic API documentation that applies to all services.
-class ActiveStorage::Service::S3Service < ActiveStorage::Service
- attr_reader :client, :bucket, :upload_options
+module ActiveStorage
+ # Wraps the Amazon Simple Storage Service (S3) as a Active Storage service.
+ # See `ActiveStorage::Service` for the generic API documentation that applies to all services.
+ class Service::S3Service < Service
+ attr_reader :client, :bucket, :upload_options
- def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
- @bucket = @client.bucket(bucket)
+ def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
+ @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
+ @bucket = @client.bucket(bucket)
- @upload_options = upload
- end
+ @upload_options = upload
+ end
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
- rescue Aws::S3::Errors::BadDigest
- raise ActiveStorage::IntegrityError
+ def upload(key, io, checksum: nil)
+ instrument :upload, key, checksum: checksum do
+ begin
+ object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
+ rescue Aws::S3::Errors::BadDigest
+ raise ActiveStorage::IntegrityError
+ end
end
end
- end
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- stream(key, &block)
- end
- else
- instrument :download, key do
- object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ def download(key)
+ if block_given?
+ instrument :streaming_download, key do
+ stream(key, &block)
+ end
+ else
+ instrument :download, key do
+ object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ end
end
end
- end
- def delete(key)
- instrument :delete, key do
- object_for(key).delete
+ def delete(key)
+ instrument :delete, key do
+ object_for(key).delete
+ end
end
- end
- def exist?(key)
- instrument :exist, key do |payload|
- answer = object_for(key).exists?
- payload[:exist] = answer
- answer
+ def exist?(key)
+ instrument :exist, key do |payload|
+ answer = object_for(key).exists?
+ payload[:exist] = answer
+ answer
+ end
end
- end
- def url(key, expires_in:, disposition:, filename:, content_type:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
- response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
- response_content_type: content_type
+ def url(key, expires_in:, disposition:, filename:, content_type:)
+ instrument :url, key do |payload|
+ generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
+ response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
+ response_content_type: content_type
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
- content_type: content_type, content_length: content_length, content_md5: checksum
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ instrument :url, key do |payload|
+ generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
+ content_type: content_type, content_length: content_length, content_md5: checksum
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
-
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum }
- end
- private
- def object_for(key)
- bucket.object(key)
+ def headers_for_direct_upload(key, content_type:, checksum:, **)
+ { "Content-Type" => content_type, "Content-MD5" => checksum }
end
- # Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
- object = object_for(key)
+ private
+ def object_for(key)
+ bucket.object(key)
+ end
+
+ # Reads the object for the given key in chunks, yielding each to the block.
+ def stream(key, options = {}, &block)
+ object = object_for(key)
- chunk_size = 5.megabytes
- offset = 0
+ chunk_size = 5.megabytes
+ offset = 0
- while offset < object.content_length
- yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
- offset += chunk_size
+ while offset < object.content_length
+ yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
+ offset += chunk_size
+ end
end
- end
+ end
end