Diffstat (limited to 'activestorage/lib/active_storage/service')
-rw-r--r--  activestorage/lib/active_storage/service/azure_storage_service.rb  42
-rw-r--r--  activestorage/lib/active_storage/service/disk_service.rb           49
-rw-r--r--  activestorage/lib/active_storage/service/gcs_service.rb            66
-rw-r--r--  activestorage/lib/active_storage/service/mirror_service.rb          5
-rw-r--r--  activestorage/lib/active_storage/service/s3_service.rb             34
5 files changed, 128 insertions, 68 deletions
diff --git a/activestorage/lib/active_storage/service/azure_storage_service.rb b/activestorage/lib/active_storage/service/azure_storage_service.rb
index 895cc9c2f1..0a9eb7f23d 100644
--- a/activestorage/lib/active_storage/service/azure_storage_service.rb
+++ b/activestorage/lib/active_storage/service/azure_storage_service.rb
@@ -19,7 +19,7 @@ module ActiveStorage
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
blobs.create_block_blob(container, key, io, content_md5: checksum)
rescue Azure::Core::Http::HTTPError
@@ -28,13 +28,13 @@ module ActiveStorage
end
end
- def download(key)
+ def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
+ instrument :download, key: key do
_, io = blobs.get_blob(container, key)
io.force_encoding(Encoding::BINARY)
end
@@ -42,17 +42,33 @@ module ActiveStorage
end
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
blobs.delete_blob(container, key)
rescue Azure::Core::Http::HTTPError
- false
+ # Ignore files already deleted
+ end
+ end
+ end
+
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ marker = nil
+
+ loop do
+ results = blobs.list_blobs(container, prefix: prefix, marker: marker)
+
+ results.each do |blob|
+ blobs.delete_blob(container, blob.name)
+ end
+
+ break unless marker = results.continuation_token.presence
end
end
end
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = blob_for(key).present?
payload[:exist] = answer
answer
@@ -60,13 +76,13 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
base_url = url_for(key)
generated_url = signer.signed_uri(
URI(base_url), false,
permissions: "r",
expiry: format_expiry(expires_in),
- content_disposition: disposition,
+ content_disposition: content_disposition_with(type: disposition, filename: filename),
content_type: content_type
).to_s
@@ -77,7 +93,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
base_url = url_for(key)
generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
expiry: format_expiry(expires_in)).to_s
@@ -108,15 +124,15 @@ module ActiveStorage
end
# Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
+ def stream(key)
blob = blob_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < blob.properties[:content_length]
- _, io = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
- yield io
+ _, chunk = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
+ yield chunk.force_encoding(Encoding::BINARY)
offset += chunk_size
end
end
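
Across every service in this diff, instrument now receives a payload hash (key: / prefix:) instead of a positional key. A minimal sketch of consuming those payloads, assuming the services still publish "service_<operation>.active_storage" events through ActiveSupport::Notifications:

    require "active_support/notifications"

    # Subscribe to all Active Storage service events; the block arguments follow the
    # standard ActiveSupport::Notifications subscriber signature.
    ActiveSupport::Notifications.subscribe(/service_.*\.active_storage/) do |name, start, finish, _id, payload|
      # With this diff the payload carries :key (or :prefix for delete_prefixed)
      # rather than a positional argument.
      puts "#{name} key=#{payload[:key]} prefix=#{payload[:prefix]} (#{(finish - start).round(3)}s)"
    end
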
diff --git a/activestorage/lib/active_storage/service/disk_service.rb b/activestorage/lib/active_storage/service/disk_service.rb
index f600753a08..d17eea9046 100644
--- a/activestorage/lib/active_storage/service/disk_service.rb
+++ b/activestorage/lib/active_storage/service/disk_service.rb
@@ -9,14 +9,14 @@ module ActiveStorage
# Wraps a local disk path as an Active Storage service. See ActiveStorage::Service for the generic API
# documentation that applies to all services.
class Service::DiskService < Service
- attr_reader :root
+ attr_reader :root, :host
- def initialize(root:)
- @root = root
+ def initialize(root:, host: "http://localhost:3000")
+ @root, @host = root, host
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
IO.copy_stream(io, make_path_for(key))
ensure_integrity_of(key, checksum) if checksum
end
@@ -24,7 +24,7 @@ module ActiveStorage
def download(key)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
File.open(path_for(key), "rb") do |file|
while data = file.read(64.kilobytes)
yield data
@@ -32,14 +32,14 @@ module ActiveStorage
end
end
else
- instrument :download, key do
+ instrument :download, key: key do
File.binread path_for(key)
end
end
end
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
begin
File.delete path_for(key)
rescue Errno::ENOENT
@@ -48,8 +48,16 @@ module ActiveStorage
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ Dir.glob(path_for("#{prefix}*")).each do |path|
+ FileUtils.rm_rf(path)
+ end
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = File.exist? path_for(key)
payload[:exist] = answer
answer
@@ -57,17 +65,17 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.rails_disk_service_path \
- verified_key_with_expiration,
- filename: filename, disposition: disposition, content_type: content_type
- else
- "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?content_type=#{content_type}&disposition=#{disposition}"
- end
+ Rails.application.routes.url_helpers.rails_disk_service_url(
+ verified_key_with_expiration,
+ filename: filename,
+ disposition: content_disposition_with(type: disposition, filename: filename),
+ content_type: content_type,
+ host: host
+ )
payload[:url] = generated_url
@@ -76,7 +84,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
verified_token_with_expiration = ActiveStorage.verifier.generate(
{
key: key,
@@ -88,12 +96,7 @@ module ActiveStorage
purpose: :blob_token }
)
- generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
- else
- "/rails/active_storage/disk/#{verified_token_with_expiration}"
- end
+ generated_url = Rails.application.routes.url_helpers.update_rails_disk_service_url(verified_token_with_expiration, host: host)
payload[:url] = generated_url
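
DiskService now always builds full URLs through the Rails route helpers and a configurable host. A sketch of constructing the service directly inside a Rails app; the host value below is hypothetical, and the diff's default is "http://localhost:3000":

    service = ActiveStorage::Service::DiskService.new(
      root: Rails.root.join("storage"),     # directory that holds the blob files
      host: "https://uploads.example.com"   # passed to rails_disk_service_url / update_rails_disk_service_url
    )
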
diff --git a/activestorage/lib/active_storage/service/gcs_service.rb b/activestorage/lib/active_storage/service/gcs_service.rb
index ebf24a36d7..6f6f4105fe 100644
--- a/activestorage/lib/active_storage/service/gcs_service.rb
+++ b/activestorage/lib/active_storage/service/gcs_service.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
+gem "google-cloud-storage", "~> 1.8"
+
require "google/cloud/storage"
require "active_support/core_ext/object/to_query"
@@ -7,50 +9,68 @@ module ActiveStorage
# Wraps the Google Cloud Storage as an Active Storage service. See ActiveStorage::Service for the generic API
# documentation that applies to all services.
class Service::GCSService < Service
- attr_reader :client, :bucket
-
- def initialize(project:, keyfile:, bucket:)
- @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
- @bucket = @client.bucket(bucket)
+ def initialize(**config)
+ @config = config
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
- bucket.create_file(io, key, md5: checksum)
+ # The official GCS client library doesn't allow us to create a file with no Content-Type metadata.
+ # We need the file we create to have no Content-Type so we can control it via the response-content-type
+ # param in signed URLs. Workaround: let the GCS client create the file with an inferred
+ # Content-Type (usually "application/octet-stream") then clear it.
+ bucket.create_file(io, key, md5: checksum).update do |file|
+ file.content_type = nil
+ end
rescue Google::Cloud::InvalidArgumentError
raise ActiveStorage::IntegrityError
end
end
end
- # FIXME: Add streaming when given a block
+ # FIXME: Download in chunks when given a block.
def download(key)
- instrument :download, key do
+ instrument :download, key: key do
io = file_for(key).download
io.rewind
- io.read
+
+ if block_given?
+ yield io.read
+ else
+ io.read
+ end
end
end
def delete(key)
- instrument :delete, key do
- file_for(key).try(:delete)
+ instrument :delete, key: key do
+ begin
+ file_for(key).delete
+ rescue Google::Cloud::NotFoundError
+ # Ignore files already deleted
+ end
+ end
+ end
+
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.files(prefix: prefix).all(&:delete)
end
end
def exist?(key)
- instrument :exist, key do |payload|
- answer = file_for(key).present?
+ instrument :exist, key: key do |payload|
+ answer = file_for(key).exists?
payload[:exist] = answer
answer
end
end
def url(key, expires_in:, filename:, content_type:, disposition:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = file_for(key).signed_url expires: expires_in, query: {
- "response-content-disposition" => disposition,
+ "response-content-disposition" => content_disposition_with(type: disposition, filename: filename),
"response-content-type" => content_type
}
@@ -61,7 +81,7 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
+ instrument :url, key: key do |payload|
generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
content_type: content_type, content_md5: checksum
@@ -76,8 +96,18 @@ module ActiveStorage
end
private
+ attr_reader :config
+
def file_for(key)
- bucket.file(key)
+ bucket.file(key, skip_lookup: true)
+ end
+
+ def bucket
+ @bucket ||= client.bucket(config.fetch(:bucket))
+ end
+
+ def client
+ @client ||= Google::Cloud::Storage.new(config.except(:bucket))
end
end
end
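
GCSService now keeps the whole config hash and builds the client and bucket lazily. A sketch under that assumption; the project, keyfile, and bucket values are made up, and any other option accepted by Google::Cloud::Storage.new would pass straight through:

    service = ActiveStorage::Service::GCSService.new(
      project: "my-gcp-project",
      keyfile: "config/gcs.keyfile",
      bucket:  "my-uploads-bucket"
    )
    service.exist?("some/key")  # client and bucket are memoized on first use
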
diff --git a/activestorage/lib/active_storage/service/mirror_service.rb b/activestorage/lib/active_storage/service/mirror_service.rb
index 39e922f7ab..7eca8ce7f4 100644
--- a/activestorage/lib/active_storage/service/mirror_service.rb
+++ b/activestorage/lib/active_storage/service/mirror_service.rb
@@ -35,6 +35,11 @@ module ActiveStorage
perform_across_services :delete, key
end
+ # Delete files at keys starting with the +prefix+ on all services.
+ def delete_prefixed(prefix)
+ perform_across_services :delete_prefixed, prefix
+ end
+
private
def each_service(&block)
[ primary, *mirrors ].each(&block)
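
Calling the new delete_prefixed on a mirror service fans out to the primary and every mirror. An illustrative call only; the "variants/<blob key>/" prefix is assumed to match how Active Storage namespaces variant keys:

    blob = ActiveStorage::Blob.first                                       # any blob, purely for illustration
    ActiveStorage::Blob.service.delete_prefixed("variants/#{blob.key}/")   # removes every stored variant of that blob
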
diff --git a/activestorage/lib/active_storage/service/s3_service.rb b/activestorage/lib/active_storage/service/s3_service.rb
index e5d1e56e05..c95672f338 100644
--- a/activestorage/lib/active_storage/service/s3_service.rb
+++ b/activestorage/lib/active_storage/service/s3_service.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require "aws-sdk"
+require "aws-sdk-s3"
require "active_support/core_ext/numeric/bytes"
module ActiveStorage
@@ -17,7 +17,7 @@ module ActiveStorage
end
def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
+ instrument :upload, key: key, checksum: checksum do
begin
object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
rescue Aws::S3::Errors::BadDigest
@@ -26,26 +26,32 @@ module ActiveStorage
end
end
- def download(key)
+ def download(key, &block)
if block_given?
- instrument :streaming_download, key do
+ instrument :streaming_download, key: key do
stream(key, &block)
end
else
- instrument :download, key do
+ instrument :download, key: key do
object_for(key).get.body.read.force_encoding(Encoding::BINARY)
end
end
end
def delete(key)
- instrument :delete, key do
+ instrument :delete, key: key do
object_for(key).delete
end
end
+ def delete_prefixed(prefix)
+ instrument :delete_prefixed, prefix: prefix do
+ bucket.objects(prefix: prefix).batch_delete!
+ end
+ end
+
def exist?(key)
- instrument :exist, key do |payload|
+ instrument :exist, key: key do |payload|
answer = object_for(key).exists?
payload[:exist] = answer
answer
@@ -53,9 +59,9 @@ module ActiveStorage
end
def url(key, expires_in:, filename:, disposition:, content_type:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
- response_content_disposition: disposition,
+ instrument :url, key: key do |payload|
+ generated_url = object_for(key).presigned_url :get, expires_in: expires_in.to_i,
+ response_content_disposition: content_disposition_with(type: disposition, filename: filename),
response_content_type: content_type
payload[:url] = generated_url
@@ -65,8 +71,8 @@ module ActiveStorage
end
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
+ instrument :url, key: key do |payload|
+ generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
content_type: content_type, content_length: content_length, content_md5: checksum
payload[:url] = generated_url
@@ -85,14 +91,14 @@ module ActiveStorage
end
# Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
+ def stream(key)
object = object_for(key)
chunk_size = 5.megabytes
offset = 0
while offset < object.content_length
- yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
+ yield object.get(range: "bytes=#{offset}-#{offset + chunk_size - 1}").body.read.force_encoding(Encoding::BINARY)
offset += chunk_size
end
end
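
With the reworked stream, the block form of download yields BINARY-encoded chunks of up to 5 MB. A usage sketch; "some-large-key" and the output path are placeholders:

    service = ActiveStorage::Blob.service
    File.open("/tmp/some-large-file", "wb") do |file|
      service.download("some-large-key") do |chunk|  # each chunk is at most 5 megabytes
        file.write(chunk)
      end
    end
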