Diffstat (limited to 'activestorage/lib/active_storage/service')
 activestorage/lib/active_storage/service/azure_storage_service.rb | 71
 activestorage/lib/active_storage/service/disk_service.rb          | 60
 activestorage/lib/active_storage/service/gcs_service.rb           | 79
 activestorage/lib/active_storage/service/mirror_service.rb        |  7
 activestorage/lib/active_storage/service/s3_service.rb            | 40
 5 files changed, 179 insertions(+), 78 deletions(-)
diff --git a/activestorage/lib/active_storage/service/azure_storage_service.rb b/activestorage/lib/active_storage/service/azure_storage_service.rb
index 895cc9c2f1..2867a4e441 100644
--- a/activestorage/lib/active_storage/service/azure_storage_service.rb
+++ b/activestorage/lib/active_storage/service/azure_storage_service.rb
@@ -8,18 +8,17 @@ module ActiveStorage
# Wraps the Microsoft Azure Storage Blob Service as an Active Storage service.
# See ActiveStorage::Service for the generic API documentation that applies to all services.
class Service::AzureStorageService < Service
- attr_reader :client, :path, :blobs, :container, :signer
+ attr_reader :client, :blobs, :container, :signer
- def initialize(path:, storage_account_name:, storage_access_key:, container:)
+ def initialize(storage_account_name:, storage_access_key:, container:)
@client = Azure::Storage::Client.create(storage_account_name: storage_account_name, storage_access_key: storage_access_key)
@signer = Azure::Storage::Core::Auth::SharedAccessSignature.new(storage_account_name, storage_access_key)
@blobs = client.blob_client
@container = container
- @path = path
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
blobs.create_block_blob(container, key, io, content_md5: checksum)
rescue Azure::Core::Http::HTTPError
@@ -28,31 +27,54 @@ module ActiveStorage
end
end
- def download(key)
+ def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
+ instrument :download, key: key do
_, io = blobs.get_blob(container, key)
io.force_encoding(Encoding::BINARY)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ _, io = blobs.get_blob(container, key, start_range: range.begin, end_range: range.exclude_end? ? range.end - 1 : range.end)
+ io.force_encoding(Encoding::BINARY)
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
blobs.delete_blob(container, key)
rescue Azure::Core::Http::HTTPError
- false
+ # Ignore files already deleted
+ end
+ end
+ end
+
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ marker = nil
+
+ loop do
+ results = blobs.list_blobs(container, prefix: prefix, marker: marker)
+
+ results.each do |blob|
+ blobs.delete_blob(container, blob.name)
+ end
+
+ break unless marker = results.continuation_token.presence
end
end
end
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = blob_for(key).present?
payload[:exist] = answer
answer
@@ -60,13 +82,13 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
- base_url = url_for(key)
+ instrument :url, key: key do |payload|
generated_url = signer.signed_uri(
- URI(base_url), false,
+ uri_for(key), false,
+ service: "b",
permissions: "r",
expiry: format_expiry(expires_in),
- content_disposition: disposition,
+ content_disposition: content_disposition_with(type: disposition, filename: filename),
content_type: content_type
).to_s
@@ -77,10 +99,13 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- base_url = url_for(key)
- generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
- expiry: format_expiry(expires_in)).to_s
+ instrument :url, key: key do |payload|
+ generated_url = signer.signed_uri(
+ uri_for(key), false,
+ service: "b",
+ permissions: "rw",
+ expiry: format_expiry(expires_in)
+ ).to_s
payload[:url] = generated_url
@@ -93,8 +118,8 @@ module ActiveStorage
end
private
- def url_for(key)
- "#{path}/#{container}/#{key}"
+ def uri_for(key)
+ blobs.generate_uri("#{container}/#{key}")
end
def blob_for(key)
@@ -108,15 +133,15 @@ module ActiveStorage
end
# Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
+ def stream(key)
blob = blob_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < blob.properties[:content_length]
- _, io = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
- yield io
+ _, chunk = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
+ yield chunk.force_encoding(Encoding::BINARY)
offset += chunk_size
end
end
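
A minimal usage sketch (not part of the diff; credentials and keys are placeholders): with the :path option gone, the Azure service is configured from the account credentials alone, and the new download_chunk and delete_prefixed methods come straight from this patch.

  # Hypothetical setup and calls; all values below are placeholders.
  service = ActiveStorage::Service::AzureStorageService.new(
    storage_account_name: "myaccount",
    storage_access_key:   "secret",
    container:            "mycontainer"
  )

  service.download_chunk("some-blob-key", 0..(5.megabytes - 1)) # => binary-encoded String
  service.delete_prefixed("variants/some-blob-key/")            # removes every blob under the prefix
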
diff --git a/activestorage/lib/active_storage/service/disk_service.rb b/activestorage/lib/active_storage/service/disk_service.rb
index f600753a08..5b652fe74e 100644
--- a/activestorage/lib/active_storage/service/disk_service.rb
+++ b/activestorage/lib/active_storage/service/disk_service.rb
@@ -16,7 +16,7 @@ module ActiveStorage
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
IO.copy_stream(io, make_path_for(key))
ensure_integrity_of(key, checksum) if checksum
end
@@ -24,7 +24,7 @@ module ActiveStorage
def download(key)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
File.open(path_for(key), "rb") do |file|
while data = file.read(64.kilobytes)
yield data
@@ -32,14 +32,23 @@ module ActiveStorage
end
end
else
- instrument :download, key do
+ instrument :download, key: key do
File.binread path_for(key)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ File.open(path_for(key), "rb") do |file|
+ file.seek range.begin
+ file.read range.size
+ end
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
File.delete path_for(key)
rescue Errno::ENOENT
@@ -48,8 +57,16 @@ module ActiveStorage
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ Dir.glob(path_for("#{prefix}*")).each do |path|
+ FileUtils.rm_rf(path)
+ end
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = File.exist? path_for(key)
payload[:exist] = answer
answer
@@ -57,17 +74,17 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.rails_disk_service_path \
- verified_key_with_expiration,
- filename: filename, disposition: disposition, content_type: content_type
- else
- "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?content_type=#{content_type}&disposition=#{disposition}"
- end
+ url_helpers.rails_disk_service_url(
+ verified_key_with_expiration,
+ host: current_host,
+ filename: filename,
+ disposition: content_disposition_with(type: disposition, filename: filename),
+ content_type: content_type
+ )
payload[:url] = generated_url
@@ -76,7 +93,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_token_with_expiration = ActiveStorage.verifier.generate(
{
key: key,
@@ -88,12 +105,7 @@ module ActiveStorage
purpose: :blob_token }
)
- generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
- else
- "/rails/active_storage/disk/#{verified_token_with_expiration}"
- end
+ generated_url = url_helpers.update_rails_disk_service_url(verified_token_with_expiration, host: current_host)
payload[:url] = generated_url
@@ -124,5 +136,13 @@ module ActiveStorage
raise ActiveStorage::IntegrityError
end
end
+
+ def url_helpers
+ @url_helpers ||= Rails.application.routes.url_helpers
+ end
+
+ def current_host
+ ActiveStorage::Current.host
+ end
end
end
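
Because the disk service now builds full URLs through the Rails route helpers, a host must be available via ActiveStorage::Current when url is called. A rough sketch, assuming the service is already configured:

  # Sketch only: the host normally comes from the application (e.g. set per request
  # in a controller); here it is set explicitly for illustration.
  ActiveStorage::Current.set(host: "http://localhost:3000") do
    service.url(
      "some-blob-key",
      expires_in:   5.minutes,
      filename:     ActiveStorage::Filename.new("avatar.png"),
      disposition:  :inline,
      content_type: "image/png"
    )
  end
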
diff --git a/activestorage/lib/active_storage/service/gcs_service.rb b/activestorage/lib/active_storage/service/gcs_service.rb
index 685dd61a0a..16a0765fc5 100644
--- a/activestorage/lib/active_storage/service/gcs_service.rb
+++ b/activestorage/lib/active_storage/service/gcs_service.rb
@@ -1,40 +1,64 @@
# frozen_string_literal: true
+gem "google-cloud-storage", "~> 1.8"
+
require "google/cloud/storage"
+require "net/http"
+
require "active_support/core_ext/object/to_query"
+require "active_storage/filename"
module ActiveStorage
# Wraps the Google Cloud Storage as an Active Storage service. See ActiveStorage::Service for the generic API
# documentation that applies to all services.
class Service::GCSService < Service
- attr_reader :client, :bucket
-
- def initialize(project:, keyfile:, bucket:, **options)
- @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile, **options)
- @bucket = @client.bucket(bucket)
+ def initialize(**config)
+ @config = config
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
- bucket.create_file(io, key, md5: checksum)
+ # The official GCS client library doesn't allow us to create a file with no Content-Type metadata.
+ # We need the file we create to have no Content-Type so we can control it via the response-content-type
+ # param in signed URLs. Workaround: let the GCS client create the file with an inferred
+ # Content-Type (usually "application/octet-stream") then clear it.
+ bucket.create_file(io, key, md5: checksum).update do |file|
+ file.content_type = nil
+ end
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
end
end
- # FIXME: Add streaming when given a block
+ # FIXME: Download in chunks when given a block.
def download(key)
- instrument :download, key do
+ instrument :download, key: key do
io = file_for(key).download
io.rewind
- io.read
+
+ if block_given?
+ yield io.read
+ else
+ io.read
+ end
+ end
+ end
+
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ file = file_for(key)
+ uri = URI(file.signed_url(expires: 30.seconds))
+
+ Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == "https") do |client|
+ client.get(uri, "Range" => "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body
+ end
end
end
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
file_for(key).delete
rescue Google::Cloud::NotFoundError
@@ -43,8 +67,14 @@ module ActiveStorage
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.files(prefix: prefix).all(&:delete)
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = file_for(key).exists?
payload[:exist] = answer
answer
@@ -52,9 +82,9 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, content_type:, disposition:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = file_for(key).signed_url expires: expires_in, query: {
- "response-content-disposition" => disposition,
+ "response-content-disposition" => content_disposition_with(type: disposition, filename: filename),
"response-content-type" => content_type
}
@@ -64,10 +94,9 @@ module ActiveStorage
end
end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
- content_type: content_type, content_md5: checksum
+ def url_for_direct_upload(key, expires_in:, checksum:, **)
+ instrument :url, key: key do |payload|
+ generated_url = bucket.signed_url key, method: "PUT", expires: expires_in, content_md5: checksum
payload[:url] = generated_url
@@ -75,13 +104,23 @@ module ActiveStorage
end
end
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum }
+ def headers_for_direct_upload(key, checksum:, **)
+ { "Content-MD5" => checksum }
end
private
+ attr_reader :config
+
def file_for(key)
bucket.file(key, skip_lookup: true)
end
+
+ def bucket
+ @bucket ||= client.bucket(config.fetch(:bucket))
+ end
+
+ def client
+ @client ||= Google::Cloud::Storage.new(config.except(:bucket))
+ end
end
end
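
A sketch of the new GCS configuration style (all values are placeholders): the service now just keeps the options hash and builds the client and bucket lazily, so anything google-cloud-storage accepts besides :bucket passes straight through.

  # Hypothetical setup; project, keyfile and bucket are placeholders.
  service = ActiveStorage::Service::GCSService.new(
    project: "my-project",
    keyfile: Rails.root.join("config", "gcs.keyfile").to_s,
    bucket:  "my-bucket"
  )

  # download_chunk issues a ranged HTTP GET against a short-lived signed URL.
  service.download_chunk("some-blob-key", 100..199)
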
diff --git a/activestorage/lib/active_storage/service/mirror_service.rb b/activestorage/lib/active_storage/service/mirror_service.rb
index 39e922f7ab..6002ef5a00 100644
--- a/activestorage/lib/active_storage/service/mirror_service.rb
+++ b/activestorage/lib/active_storage/service/mirror_service.rb
@@ -9,7 +9,7 @@ module ActiveStorage
class Service::MirrorService < Service
attr_reader :primary, :mirrors
- delegate :download, :exist?, :url, to: :primary
+ delegate :download, :download_chunk, :exist?, :url, to: :primary
# Stitch together from named services.
def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
@@ -35,6 +35,11 @@ module ActiveStorage
perform_across_services :delete, key
end
+ # Delete files at keys starting with the +prefix+ on all services.
+ def delete_prefixed(prefix)
+ perform_across_services :delete_prefixed, prefix
+ end
+
private
def each_service(&block)
[ primary, *mirrors ].each(&block)
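
A small sketch of the mirror behaviour after this change: reads (including the new download_chunk) still go to the primary only, while delete_prefixed, like delete and upload, fans out to every service. The service instances below are hypothetical, already-configured services.

  mirror = ActiveStorage::Service::MirrorService.new(
    primary: primary_service,
    mirrors: [mirror_service]
  )

  mirror.download_chunk("some-blob-key", 0..1023)   # served by the primary
  mirror.delete_prefixed("variants/some-blob-key/") # performed on the primary and all mirrors
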
diff --git a/activestorage/lib/active_storage/service/s3_service.rb b/activestorage/lib/active_storage/service/s3_service.rb
index e074269353..0286e7ff21 100644
--- a/activestorage/lib/active_storage/service/s3_service.rb
+++ b/activestorage/lib/active_storage/service/s3_service.rb
@@ -9,15 +9,15 @@ module ActiveStorage
class Service::S3Service < Service
attr_reader :client, :bucket, :upload_options
- def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
+ def initialize(bucket:, upload: {}, **options)
+ @client = Aws::S3::Resource.new(**options)
@bucket = @client.bucket(bucket)
@upload_options = upload
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
rescue Aws::S3::Errors::BadDigest
@@ -26,26 +26,38 @@ module ActiveStorage
end
end
- def download(key)
+ def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
- object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ instrument :download, key: key do
+ object_for(key).get.body.string.force_encoding(Encoding::BINARY)
end
end
end
+ def download_chunk(key, range)
+ instrument :download_chunk, key: key, range: range do
+ object_for(key).get(range: "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body.read.force_encoding(Encoding::BINARY)
+ end
+ end
+
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
object_for(key).delete
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.objects(prefix: prefix).batch_delete!
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = object_for(key).exists?
payload[:exist] = answer
answer
@@ -53,9 +65,9 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = object_for(key).presigned_url :get, expires_in: expires_in.to_i,
- response_content_disposition: disposition,
+ response_content_disposition: content_disposition_with(type: disposition, filename: filename),
response_content_type: content_type
payload[:url] = generated_url
@@ -65,7 +77,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
content_type: content_type, content_length: content_length, content_md5: checksum
@@ -85,14 +97,14 @@ module ActiveStorage
end
# Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
+ def stream(key)
object = object_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < object.content_length
- yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
+ yield object.get(range: "bytes=#{offset}-#{offset + chunk_size - 1}").body.read.force_encoding(Encoding::BINARY)
offset += chunk_size
end
end
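
Finally, a configuration sketch for the S3 service (placeholder values): credentials and region are no longer named initializer parameters, so they, along with any other option Aws::S3::Resource understands, travel through the options splat.

  # Hypothetical setup; credentials, region and bucket are placeholders.
  service = ActiveStorage::Service::S3Service.new(
    bucket:            "my-bucket",
    access_key_id:     ENV["AWS_ACCESS_KEY_ID"],
    secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
    region:            "us-east-1",
    upload:            { server_side_encryption: "AES256" } # merged into every #put
  )

  service.download_chunk("some-object-key", 0...1.megabyte) # exclusive ranges are handled by the service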