path: root/activestorage
author     claudiob <claudiob@users.noreply.github.com>  2017-08-04 16:33:40 -0700
committer  claudiob <claudiob@users.noreply.github.com>  2017-08-04 16:40:10 -0700
commit     bb7599a6c84aba44cbb5f21486ffdb4a549717dd (patch)
tree       26dd9fba5a7f6c09fbfcec9fbf29b7088db88411 /activestorage
parent     552840660389e39f3ba8e47dcf35ab817c01cb48 (diff)
download   rails-bb7599a6c84aba44cbb5f21486ffdb4a549717dd.tar.gz
           rails-bb7599a6c84aba44cbb5f21486ffdb4a549717dd.tar.bz2
           rails-bb7599a6c84aba44cbb5f21486ffdb4a549717dd.zip
`module ActiveStorage`, not `ActiveStorage::Class`
The reasons for this commit are:

- uniformity with the other Rails libraries
- (possibly) better behavior with respect to autoloading
- fixing the index in the generated documentation

Before this commit, running `rake rdoc` generates this left sidebar (ActiveStorage entries are indexed twice, both inside and outside the module):

<img width="308" alt="before" src="https://user-images.githubusercontent.com/10076/28939523-7c087dec-7846-11e7-9289-38ed4a2930cd.png">

After this commit, running `rake rdoc` generates this left sidebar (ActiveStorage entries are only indexed inside the module):

<img width="303" alt="after" src="https://user-images.githubusercontent.com/10076/28939524-7c090be0-7846-11e7-8ee5-29dfecae548e.png">
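To make the change concrete, here is a minimal sketch (not taken from the diff itself) of the two styles the commit moves between: defining each class under a compact constant path versus nesting it inside the module, as the other Rails frameworks do.

```ruby
# Before: reopening the constant with a compact path.
class ActiveStorage::Attached
end

# After: nesting the class inside the module. RDoc then indexes the class
# only under ActiveStorage, and constant lookup inside the body resolves
# other ActiveStorage::* names without the fully qualified prefix.
module ActiveStorage
  class Attached
  end
end
```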
Diffstat (limited to 'activestorage')
-rw-r--r--  activestorage/lib/active_storage/attached.rb                       |  46
-rw-r--r--  activestorage/lib/active_storage/attached/macros.rb                | 134
-rw-r--r--  activestorage/lib/active_storage/attached/many.rb                  |  85
-rw-r--r--  activestorage/lib/active_storage/attached/one.rb                   |  96
-rw-r--r--  activestorage/lib/active_storage/log_subscriber.rb                 |  70
-rw-r--r--  activestorage/lib/active_storage/service.rb                        | 195
-rw-r--r--  activestorage/lib/active_storage/service/azure_storage_service.rb  | 168
-rw-r--r--  activestorage/lib/active_storage/service/configurator.rb           |  44
-rw-r--r--  activestorage/lib/active_storage/service/disk_service.rb           | 187
-rw-r--r--  activestorage/lib/active_storage/service/gcs_service.rb            | 112
-rw-r--r--  activestorage/lib/active_storage/service/mirror_service.rb         |  74
-rw-r--r--  activestorage/lib/active_storage/service/s3_service.rb             | 134
12 files changed, 685 insertions(+), 660 deletions(-)
diff --git a/activestorage/lib/active_storage/attached.rb b/activestorage/lib/active_storage/attached.rb
index 2dbf841864..5ac8ba5377 100644
--- a/activestorage/lib/active_storage/attached.rb
+++ b/activestorage/lib/active_storage/attached.rb
@@ -2,33 +2,35 @@ require "action_dispatch"
require "action_dispatch/http/upload"
require "active_support/core_ext/module/delegation"
+module ActiveStorage
# Abstract baseclass for the concrete `ActiveStorage::Attached::One` and `ActiveStorage::Attached::Many`
# classes that both provide proxy access to the blob association for a record.
-class ActiveStorage::Attached
- attr_reader :name, :record
+ class Attached
+ attr_reader :name, :record
- def initialize(name, record)
- @name, @record = name, record
- end
+ def initialize(name, record)
+ @name, @record = name, record
+ end
- private
- def create_blob_from(attachable)
- case attachable
- when ActiveStorage::Blob
- attachable
- when ActionDispatch::Http::UploadedFile
- ActiveStorage::Blob.create_after_upload! \
- io: attachable.open,
- filename: attachable.original_filename,
- content_type: attachable.content_type
- when Hash
- ActiveStorage::Blob.create_after_upload!(attachable)
- when String
- ActiveStorage::Blob.find_signed(attachable)
- else
- nil
+ private
+ def create_blob_from(attachable)
+ case attachable
+ when ActiveStorage::Blob
+ attachable
+ when ActionDispatch::Http::UploadedFile
+ ActiveStorage::Blob.create_after_upload! \
+ io: attachable.open,
+ filename: attachable.original_filename,
+ content_type: attachable.content_type
+ when Hash
+ ActiveStorage::Blob.create_after_upload!(attachable)
+ when String
+ ActiveStorage::Blob.find_signed(attachable)
+ else
+ nil
+ end
end
- end
+ end
end
require "active_storage/attached/one"
diff --git a/activestorage/lib/active_storage/attached/macros.rb b/activestorage/lib/active_storage/attached/macros.rb
index 89297e5bdf..fbd64b5edc 100644
--- a/activestorage/lib/active_storage/attached/macros.rb
+++ b/activestorage/lib/active_storage/attached/macros.rb
@@ -1,76 +1,78 @@
-# Provides the class-level DSL for declaring that an Active Record model has attached blobs.
-module ActiveStorage::Attached::Macros
- # Specifies the relation between a single attachment and the model.
- #
- # class User < ActiveRecord::Base
- # has_one_attached :avatar
- # end
- #
- # There is no column defined on the model side, Active Storage takes
- # care of the mapping between your records and the attachment.
- #
- # Under the covers, this relationship is implemented as a `has_one` association to a
- # `ActiveStorage::Attachment` record and a `has_one-through` association to a
- # `ActiveStorage::Blob` record. These associations are available as `avatar_attachment`
- # and `avatar_blob`. But you shouldn't need to work with these associations directly in
- # most circumstances.
- #
- # The system has been designed to having you go through the `ActiveStorage::Attached::One`
- # proxy that provides the dynamic proxy to the associations and factory methods, like `#attach`.
- #
- # If the +:dependent+ option isn't set, the attachment will be purged
- # (i.e. destroyed) whenever the record is destroyed.
- def has_one_attached(name, dependent: :purge_later)
- define_method(name) do
- instance_variable_get("@active_storage_attached_#{name}") ||
- instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::One.new(name, self))
- end
+module ActiveStorage
+ # Provides the class-level DSL for declaring that an Active Record model has attached blobs.
+ module Attached::Macros
+ # Specifies the relation between a single attachment and the model.
+ #
+ # class User < ActiveRecord::Base
+ # has_one_attached :avatar
+ # end
+ #
+ # There is no column defined on the model side, Active Storage takes
+ # care of the mapping between your records and the attachment.
+ #
+ # Under the covers, this relationship is implemented as a `has_one` association to a
+ # `ActiveStorage::Attachment` record and a `has_one-through` association to a
+ # `ActiveStorage::Blob` record. These associations are available as `avatar_attachment`
+ # and `avatar_blob`. But you shouldn't need to work with these associations directly in
+ # most circumstances.
+ #
+ # The system has been designed to having you go through the `ActiveStorage::Attached::One`
+ # proxy that provides the dynamic proxy to the associations and factory methods, like `#attach`.
+ #
+ # If the +:dependent+ option isn't set, the attachment will be purged
+ # (i.e. destroyed) whenever the record is destroyed.
+ def has_one_attached(name, dependent: :purge_later)
+ define_method(name) do
+ instance_variable_get("@active_storage_attached_#{name}") ||
+ instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::One.new(name, self))
+ end
- has_one :"#{name}_attachment", -> { where(name: name) }, class_name: "ActiveStorage::Attachment", as: :record
- has_one :"#{name}_blob", through: :"#{name}_attachment", class_name: "ActiveStorage::Blob", source: :blob
+ has_one :"#{name}_attachment", -> { where(name: name) }, class_name: "ActiveStorage::Attachment", as: :record
+ has_one :"#{name}_blob", through: :"#{name}_attachment", class_name: "ActiveStorage::Blob", source: :blob
- if dependent == :purge_later
- before_destroy { public_send(name).purge_later }
+ if dependent == :purge_later
+ before_destroy { public_send(name).purge_later }
+ end
end
- end
- # Specifies the relation between multiple attachments and the model.
- #
- # class Gallery < ActiveRecord::Base
- # has_many_attached :photos
- # end
- #
- # There are no columns defined on the model side, Active Storage takes
- # care of the mapping between your records and the attachments.
- #
- # To avoid N+1 queries, you can include the attached blobs in your query like so:
- #
- # Gallery.where(user: Current.user).with_attached_photos
- #
- # Under the covers, this relationship is implemented as a `has_many` association to a
- # `ActiveStorage::Attachment` record and a `has_many-through` association to a
- # `ActiveStorage::Blob` record. These associations are available as `photos_attachments`
- # and `photos_blobs`. But you shouldn't need to work with these associations directly in
- # most circumstances.
- #
- # The system has been designed to having you go through the `ActiveStorage::Attached::Many`
- # proxy that provides the dynamic proxy to the associations and factory methods, like `#attach`.
- #
- # If the +:dependent+ option isn't set, all the attachments will be purged
- # (i.e. destroyed) whenever the record is destroyed.
- def has_many_attached(name, dependent: :purge_later)
- define_method(name) do
- instance_variable_get("@active_storage_attached_#{name}") ||
- instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::Many.new(name, self))
- end
+ # Specifies the relation between multiple attachments and the model.
+ #
+ # class Gallery < ActiveRecord::Base
+ # has_many_attached :photos
+ # end
+ #
+ # There are no columns defined on the model side, Active Storage takes
+ # care of the mapping between your records and the attachments.
+ #
+ # To avoid N+1 queries, you can include the attached blobs in your query like so:
+ #
+ # Gallery.where(user: Current.user).with_attached_photos
+ #
+ # Under the covers, this relationship is implemented as a `has_many` association to a
+ # `ActiveStorage::Attachment` record and a `has_many-through` association to a
+ # `ActiveStorage::Blob` record. These associations are available as `photos_attachments`
+ # and `photos_blobs`. But you shouldn't need to work with these associations directly in
+ # most circumstances.
+ #
+ # The system has been designed to having you go through the `ActiveStorage::Attached::Many`
+ # proxy that provides the dynamic proxy to the associations and factory methods, like `#attach`.
+ #
+ # If the +:dependent+ option isn't set, all the attachments will be purged
+ # (i.e. destroyed) whenever the record is destroyed.
+ def has_many_attached(name, dependent: :purge_later)
+ define_method(name) do
+ instance_variable_get("@active_storage_attached_#{name}") ||
+ instance_variable_set("@active_storage_attached_#{name}", ActiveStorage::Attached::Many.new(name, self))
+ end
- has_many :"#{name}_attachments", -> { where(name: name) }, as: :record, class_name: "ActiveStorage::Attachment"
- has_many :"#{name}_blobs", through: :"#{name}_attachments", class_name: "ActiveStorage::Blob", source: :blob
+ has_many :"#{name}_attachments", -> { where(name: name) }, as: :record, class_name: "ActiveStorage::Attachment"
+ has_many :"#{name}_blobs", through: :"#{name}_attachments", class_name: "ActiveStorage::Blob", source: :blob
- scope :"with_attached_#{name}", -> { includes("#{name}_attachments": :blob) }
+ scope :"with_attached_#{name}", -> { includes("#{name}_attachments": :blob) }
- if dependent == :purge_later
- before_destroy { public_send(name).purge_later }
+ if dependent == :purge_later
+ before_destroy { public_send(name).purge_later }
+ end
end
end
end
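Pulled together from the doc comments above, a short usage sketch of the two macros (the model names are the ones the comments already use):

```ruby
class User < ActiveRecord::Base
  has_one_attached :avatar      # defines avatar, avatar_attachment, avatar_blob
end

class Gallery < ActiveRecord::Base
  has_many_attached :photos     # defines photos, photos_attachments, photos_blobs
end

# The generated scope eager-loads the attached blobs to avoid N+1 queries:
Gallery.where(user: Current.user).with_attached_photos
```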
diff --git a/activestorage/lib/active_storage/attached/many.rb b/activestorage/lib/active_storage/attached/many.rb
index 035cd9c091..82989e4605 100644
--- a/activestorage/lib/active_storage/attached/many.rb
+++ b/activestorage/lib/active_storage/attached/many.rb
@@ -1,51 +1,54 @@
-# Decorated proxy object representing of multiple attachments to a model.
-class ActiveStorage::Attached::Many < ActiveStorage::Attached
- delegate_missing_to :attachments
+module ActiveStorage
+ # Decorated proxy object representing of multiple attachments to a model.
+ class Attached::Many < Attached
+ delegate_missing_to :attachments
- # Returns all the associated attachment records.
- #
- # All methods called on this proxy object that aren't listed here will automatically be delegated to `attachments`.
- def attachments
- record.public_send("#{name}_attachments")
- end
+ # Returns all the associated attachment records.
+ #
+ # All methods called on this proxy object that aren't listed here will automatically be delegated to `attachments`.
+ def attachments
+ record.public_send("#{name}_attachments")
+ end
- # Associates one or several attachments with the current record, saving them to the database.
- # Examples:
- #
- # document.images.attach(params[:images]) # Array of ActionDispatch::Http::UploadedFile objects
- # document.images.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
- # document.images.attach(io: File.open("~/racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpg")
- # document.images.attach([ first_blob, second_blob ])
- def attach(*attachables)
- attachables.flatten.collect do |attachable|
- attachments.create!(name: name, blob: create_blob_from(attachable))
+ # Associates one or several attachments with the current record, saving them to the database.
+ # Examples:
+ #
+ # document.images.attach(params[:images]) # Array of ActionDispatch::Http::UploadedFile objects
+ # document.images.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
+ # document.images.attach(io: File.open("~/racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpg")
+ # document.images.attach([ first_blob, second_blob ])
+ def attach(*attachables)
+ attachables.flatten.collect do |attachable|
+ attachments.create!(name: name, blob: create_blob_from(attachable))
+ end
end
- end
- # Returns true if any attachments has been made.
- #
- # class Gallery < ActiveRecord::Base
- # has_many_attached :photos
- # end
- #
- # Gallery.new.photos.attached? # => false
- def attached?
- attachments.any?
- end
+ # Returns true if any attachments has been made.
+ #
+ # class Gallery < ActiveRecord::Base
+ # has_many_attached :photos
+ # end
+ #
+ # Gallery.new.photos.attached? # => false
+ def attached?
+ attachments.any?
+ end
- # Directly purges each associated attachment (i.e. destroys the blobs and
- # attachments and deletes the files on the service).
- def purge
- if attached?
- attachments.each(&:purge)
- attachments.reload
+ # Directly purges each associated attachment (i.e. destroys the blobs and
+ # attachments and deletes the files on the service).
+ def purge
+ if attached?
+ attachments.each(&:purge)
+ attachments.reload
+ end
end
- end
- # Purges each associated attachment through the queuing system.
- def purge_later
- if attached?
- attachments.each(&:purge_later)
+ # Purges each associated attachment through the queuing system.
+ def purge_later
+ if attached?
+ attachments.each(&:purge_later)
+ end
end
end
end
+
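A brief sketch of the `Attached::Many` proxy in use; the `gallery` record is hypothetical and assumes a `has_many_attached :photos` declaration as in the macro docs:

```ruby
gallery = Gallery.create!   # hypothetical record with has_many_attached :photos

gallery.photos.attach(
  io: File.open("/tmp/racecar.jpg"),
  filename: "racecar.jpg",
  content_type: "image/jpeg"
)

gallery.photos.attached?    # => true
gallery.photos.purge_later  # enqueues a purge for each attachment
```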
diff --git a/activestorage/lib/active_storage/attached/one.rb b/activestorage/lib/active_storage/attached/one.rb
index 0c522e856e..6b34b30f1c 100644
--- a/activestorage/lib/active_storage/attached/one.rb
+++ b/activestorage/lib/active_storage/attached/one.rb
@@ -1,56 +1,58 @@
-# Representation of a single attachment to a model.
-class ActiveStorage::Attached::One < ActiveStorage::Attached
- delegate_missing_to :attachment
+module ActiveStorage
+ # Representation of a single attachment to a model.
+ class Attached::One < Attached
+ delegate_missing_to :attachment
- # Returns the associated attachment record.
- #
- # You don't have to call this method to access the attachment's methods as
- # they are all available at the model level.
- def attachment
- record.public_send("#{name}_attachment")
- end
-
- # Associates a given attachment with the current record, saving it to the database.
- # Examples:
- #
- # person.avatar.attach(params[:avatar]) # ActionDispatch::Http::UploadedFile object
- # person.avatar.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
- # person.avatar.attach(io: File.open("~/face.jpg"), filename: "face.jpg", content_type: "image/jpg")
- # person.avatar.attach(avatar_blob) # ActiveStorage::Blob object
- def attach(attachable)
- write_attachment \
- ActiveStorage::Attachment.create!(record: record, name: name, blob: create_blob_from(attachable))
- end
+ # Returns the associated attachment record.
+ #
+ # You don't have to call this method to access the attachment's methods as
+ # they are all available at the model level.
+ def attachment
+ record.public_send("#{name}_attachment")
+ end
- # Returns true if an attachment has been made.
- #
- # class User < ActiveRecord::Base
- # has_one_attached :avatar
- # end
- #
- # User.new.avatar.attached? # => false
- def attached?
- attachment.present?
- end
+ # Associates a given attachment with the current record, saving it to the database.
+ # Examples:
+ #
+ # person.avatar.attach(params[:avatar]) # ActionDispatch::Http::UploadedFile object
+ # person.avatar.attach(params[:signed_blob_id]) # Signed reference to blob from direct upload
+ # person.avatar.attach(io: File.open("~/face.jpg"), filename: "face.jpg", content_type: "image/jpg")
+ # person.avatar.attach(avatar_blob) # ActiveStorage::Blob object
+ def attach(attachable)
+ write_attachment \
+ ActiveStorage::Attachment.create!(record: record, name: name, blob: create_blob_from(attachable))
+ end
- # Directly purges the attachment (i.e. destroys the blob and
- # attachment and deletes the file on the service).
- def purge
- if attached?
- attachment.purge
- write_attachment nil
+ # Returns true if an attachment has been made.
+ #
+ # class User < ActiveRecord::Base
+ # has_one_attached :avatar
+ # end
+ #
+ # User.new.avatar.attached? # => false
+ def attached?
+ attachment.present?
end
- end
- # Purges the attachment through the queuing system.
- def purge_later
- if attached?
- attachment.purge_later
+ # Directly purges the attachment (i.e. destroys the blob and
+ # attachment and deletes the file on the service).
+ def purge
+ if attached?
+ attachment.purge
+ write_attachment nil
+ end
end
- end
- private
- def write_attachment(attachment)
- record.public_send("#{name}_attachment=", attachment)
+ # Purges the attachment through the queuing system.
+ def purge_later
+ if attached?
+ attachment.purge_later
+ end
end
+
+ private
+ def write_attachment(attachment)
+ record.public_send("#{name}_attachment=", attachment)
+ end
+ end
end
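And the single-attachment counterpart, again a hedged sketch built from the doc comments (the `user` record is hypothetical):

```ruby
user = User.create!   # hypothetical record with has_one_attached :avatar

user.avatar.attach(io: File.open("/tmp/face.jpg"),
                   filename: "face.jpg",
                   content_type: "image/jpeg")

user.avatar.attached?  # => true
user.avatar.purge      # destroys attachment and blob, deletes the file on the service
```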
diff --git a/activestorage/lib/active_storage/log_subscriber.rb b/activestorage/lib/active_storage/log_subscriber.rb
index 4ac34a3b25..5c1b8d23ef 100644
--- a/activestorage/lib/active_storage/log_subscriber.rb
+++ b/activestorage/lib/active_storage/log_subscriber.rb
@@ -1,48 +1,50 @@
require "active_support/log_subscriber"
-class ActiveStorage::LogSubscriber < ActiveSupport::LogSubscriber
- def service_upload(event)
- message = "Uploaded file to key: #{key_in(event)}"
- message << " (checksum: #{event.payload[:checksum]})" if event.payload[:checksum]
- info event, color(message, GREEN)
- end
-
- def service_download(event)
- info event, color("Downloaded file from key: #{key_in(event)}", BLUE)
- end
-
- def service_delete(event)
- info event, color("Deleted file from key: #{key_in(event)}", RED)
- end
-
- def service_exist(event)
- debug event, color("Checked if file exist at key: #{key_in(event)} (#{event.payload[:exist] ? "yes" : "no"})", BLUE)
- end
-
- def service_url(event)
- debug event, color("Generated URL for file at key: #{key_in(event)} (#{event.payload[:url]})", BLUE)
- end
+module ActiveStorage
+ class LogSubscriber < ActiveSupport::LogSubscriber
+ def service_upload(event)
+ message = "Uploaded file to key: #{key_in(event)}"
+ message << " (checksum: #{event.payload[:checksum]})" if event.payload[:checksum]
+ info event, color(message, GREEN)
+ end
- def logger
- ActiveStorage::Service.logger
- end
+ def service_download(event)
+ info event, color("Downloaded file from key: #{key_in(event)}", BLUE)
+ end
- private
- def info(event, colored_message)
- super log_prefix_for_service(event) + colored_message
+ def service_delete(event)
+ info event, color("Deleted file from key: #{key_in(event)}", RED)
end
- def debug(event, colored_message)
- super log_prefix_for_service(event) + colored_message
+ def service_exist(event)
+ debug event, color("Checked if file exist at key: #{key_in(event)} (#{event.payload[:exist] ? "yes" : "no"})", BLUE)
end
- def log_prefix_for_service(event)
- color " #{event.payload[:service]} Storage (#{event.duration.round(1)}ms) ", CYAN
+ def service_url(event)
+ debug event, color("Generated URL for file at key: #{key_in(event)} (#{event.payload[:url]})", BLUE)
end
- def key_in(event)
- event.payload[:key]
+ def logger
+ ActiveStorage::Service.logger
end
+
+ private
+ def info(event, colored_message)
+ super log_prefix_for_service(event) + colored_message
+ end
+
+ def debug(event, colored_message)
+ super log_prefix_for_service(event) + colored_message
+ end
+
+ def log_prefix_for_service(event)
+ color " #{event.payload[:service]} Storage (#{event.duration.round(1)}ms) ", CYAN
+ end
+
+ def key_in(event)
+ event.payload[:key]
+ end
+ end
end
ActiveStorage::LogSubscriber.attach_to :active_storage
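A rough sketch of the round trip this subscriber covers: `Service#instrument` publishes a `service_*.active_storage` notification, and the subscriber above renders it through `ActiveStorage::Service.logger`. The configuration hash and key are hypothetical; timings will obviously vary.

```ruby
require "logger"
require "stringio"

ActiveStorage::Service.logger = Logger.new(STDOUT)

# Build a Disk service from a hand-rolled configurations hash (hypothetical paths).
service = ActiveStorage::Service.configure(
  :local,
  local: { service: "Disk", root: "/tmp/storage" }
)

service.upload("some-key", StringIO.new("hello"))
# Logged roughly as:
#   Disk Storage (1.2ms) Uploaded file to key: some-key
```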
diff --git a/activestorage/lib/active_storage/service.rb b/activestorage/lib/active_storage/service.rb
index 4223295ed8..eb25e9f001 100644
--- a/activestorage/lib/active_storage/service.rb
+++ b/activestorage/lib/active_storage/service.rb
@@ -1,114 +1,115 @@
require "active_storage/log_subscriber"
-# Abstract class serving as an interface for concrete services.
-#
-# The available services are:
-#
-# * +Disk+, to manage attachments saved directly on the hard drive.
-# * +GCS+, to manage attachments through Google Cloud Storage.
-# * +S3+, to manage attachments through Amazon S3.
-# * +AzureStorage+, to manage attachments through Microsoft Azure Storage.
-# * +Mirror+, to be able to use several services to manage attachments.
-#
-# Inside a Rails application, you can set-up your services through the
-# generated <tt>config/storage.yml</tt> file and reference one
-# of the aforementioned constant under the +service+ key. For example:
-#
-# local:
-# service: Disk
-# root: <%= Rails.root.join("storage") %>
-#
-# You can checkout the service's constructor to know which keys are required.
-#
-# Then, in your application's configuration, you can specify the service to
-# use like this:
-#
-# config.active_storage.service = :local
-#
-# If you are using Active Storage outside of a Ruby on Rails application, you
-# can configure the service to use like this:
-#
-# ActiveStorage::Blob.service = ActiveStorage::Service.configure(
-# :Disk,
-# root: Pathname("/foo/bar/storage")
-# )
-class ActiveStorage::Service
- class ActiveStorage::IntegrityError < StandardError; end
+module ActiveStorage
+ class IntegrityError < StandardError; end
+ # Abstract class serving as an interface for concrete services.
+ #
+ # The available services are:
+ #
+ # * +Disk+, to manage attachments saved directly on the hard drive.
+ # * +GCS+, to manage attachments through Google Cloud Storage.
+ # * +S3+, to manage attachments through Amazon S3.
+ # * +AzureStorage+, to manage attachments through Microsoft Azure Storage.
+ # * +Mirror+, to be able to use several services to manage attachments.
+ #
+ # Inside a Rails application, you can set-up your services through the
+ # generated <tt>config/storage.yml</tt> file and reference one
+ # of the aforementioned constant under the +service+ key. For example:
+ #
+ # local:
+ # service: Disk
+ # root: <%= Rails.root.join("storage") %>
+ #
+ # You can checkout the service's constructor to know which keys are required.
+ #
+ # Then, in your application's configuration, you can specify the service to
+ # use like this:
+ #
+ # config.active_storage.service = :local
+ #
+ # If you are using Active Storage outside of a Ruby on Rails application, you
+ # can configure the service to use like this:
+ #
+ # ActiveStorage::Blob.service = ActiveStorage::Service.configure(
+ # :Disk,
+ # root: Pathname("/foo/bar/storage")
+ # )
+ class Service
+ extend ActiveSupport::Autoload
+ autoload :Configurator
- extend ActiveSupport::Autoload
- autoload :Configurator
+ class_attribute :logger
- class_attribute :logger
+ class << self
+ # Configure an Active Storage service by name from a set of configurations,
+ # typically loaded from a YAML file. The Active Storage engine uses this
+ # to set the global Active Storage service when the app boots.
+ def configure(service_name, configurations)
+ Configurator.build(service_name, configurations)
+ end
- class << self
- # Configure an Active Storage service by name from a set of configurations,
- # typically loaded from a YAML file. The Active Storage engine uses this
- # to set the global Active Storage service when the app boots.
- def configure(service_name, configurations)
- Configurator.build(service_name, configurations)
+ # Override in subclasses that stitch together multiple services and hence
+ # need to build additional services using the configurator.
+ #
+ # Passes the configurator and all of the service's config as keyword args.
+ #
+ # See MirrorService for an example.
+ def build(configurator:, service: nil, **service_config) #:nodoc:
+ new(**service_config)
+ end
end
- # Override in subclasses that stitch together multiple services and hence
- # need to build additional services using the configurator.
- #
- # Passes the configurator and all of the service's config as keyword args.
- #
- # See MirrorService for an example.
- def build(configurator:, service: nil, **service_config) #:nodoc:
- new(**service_config)
+ # Upload the `io` to the `key` specified. If a `checksum` is provided, the service will
+ # ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
+ def upload(key, io, checksum: nil)
+ raise NotImplementedError
end
- end
-
- # Upload the `io` to the `key` specified. If a `checksum` is provided, the service will
- # ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
- def upload(key, io, checksum: nil)
- raise NotImplementedError
- end
-
- # Return the content of the file at the `key`.
- def download(key)
- raise NotImplementedError
- end
- # Delete the file at the `key`.
- def delete(key)
- raise NotImplementedError
- end
-
- # Return true if a file exists at the `key`.
- def exist?(key)
- raise NotImplementedError
- end
+ # Return the content of the file at the `key`.
+ def download(key)
+ raise NotImplementedError
+ end
- # Returns a signed, temporary URL for the file at the `key`. The URL will be valid for the amount
- # of seconds specified in `expires_in`. You most also provide the `disposition` (`:inline` or `:attachment`),
- # `filename`, and `content_type` that you wish the file to be served with on request.
- def url(key, expires_in:, disposition:, filename:, content_type:)
- raise NotImplementedError
- end
+ # Delete the file at the `key`.
+ def delete(key)
+ raise NotImplementedError
+ end
- # Returns a signed, temporary URL that a direct upload file can be PUT to on the `key`.
- # The URL will be valid for the amount of seconds specified in `expires_in`.
- # You most also provide the `content_type`, `content_length`, and `checksum` of the file
- # that will be uploaded. All these attributes will be validated by the service upon upload.
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- raise NotImplementedError
- end
+ # Return true if a file exists at the `key`.
+ def exist?(key)
+ raise NotImplementedError
+ end
- # Returns a Hash of headers for `url_for_direct_upload` requests.
- def headers_for_direct_upload(key, filename:, content_type:, content_length:, checksum:)
- {}
- end
+ # Returns a signed, temporary URL for the file at the `key`. The URL will be valid for the amount
+ # of seconds specified in `expires_in`. You most also provide the `disposition` (`:inline` or `:attachment`),
+ # `filename`, and `content_type` that you wish the file to be served with on request.
+ def url(key, expires_in:, disposition:, filename:, content_type:)
+ raise NotImplementedError
+ end
- private
- def instrument(operation, key, payload = {}, &block)
- ActiveSupport::Notifications.instrument(
- "service_#{operation}.active_storage",
- payload.merge(key: key, service: service_name), &block)
+ # Returns a signed, temporary URL that a direct upload file can be PUT to on the `key`.
+ # The URL will be valid for the amount of seconds specified in `expires_in`.
+ # You most also provide the `content_type`, `content_length`, and `checksum` of the file
+ # that will be uploaded. All these attributes will be validated by the service upon upload.
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ raise NotImplementedError
end
- def service_name
- # ActiveStorage::Service::DiskService => Disk
- self.class.name.split("::").third.remove("Service")
+ # Returns a Hash of headers for `url_for_direct_upload` requests.
+ def headers_for_direct_upload(key, filename:, content_type:, content_length:, checksum:)
+ {}
end
+
+ private
+ def instrument(operation, key, payload = {}, &block)
+ ActiveSupport::Notifications.instrument(
+ "service_#{operation}.active_storage",
+ payload.merge(key: key, service: service_name), &block)
+ end
+
+ def service_name
+ # ActiveStorage::Service::DiskService => Disk
+ self.class.name.split("::").third.remove("Service")
+ end
+ end
end
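To illustrate the abstract interface defined above, here is a hedged sketch of a subclass that satisfies it. `NullService` is an invented name for illustration only; methods left unimplemented (`url`, `url_for_direct_upload`) would still raise `NotImplementedError` from the base class.

```ruby
require "active_storage/service"

module ActiveStorage
  # Hypothetical service that accepts and discards every file.
  class Service::NullService < Service
    def upload(key, io, checksum: nil)
      instrument :upload, key, checksum: checksum do
        # intentionally discard the bytes
      end
    end

    def download(key)
      instrument(:download, key) { "" }
    end

    def delete(key)
      instrument(:delete, key) { true }
    end

    def exist?(key)
      instrument(:exist, key) { |payload| payload[:exist] = false }
    end
  end
end
```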
diff --git a/activestorage/lib/active_storage/service/azure_storage_service.rb b/activestorage/lib/active_storage/service/azure_storage_service.rb
index 527dc57eeb..62b13433fe 100644
--- a/activestorage/lib/active_storage/service/azure_storage_service.rb
+++ b/activestorage/lib/active_storage/service/azure_storage_service.rb
@@ -2,114 +2,116 @@ require "active_support/core_ext/numeric/bytes"
require "azure/storage"
require "azure/storage/core/auth/shared_access_signature"
-# Wraps the Microsoft Azure Storage Blob Service as a Active Storage service.
-# See `ActiveStorage::Service` for the generic API documentation that applies to all services.
-class ActiveStorage::Service::AzureStorageService < ActiveStorage::Service
- attr_reader :client, :path, :blobs, :container, :signer
-
- def initialize(path:, storage_account_name:, storage_access_key:, container:)
- @client = Azure::Storage::Client.create(storage_account_name: storage_account_name, storage_access_key: storage_access_key)
- @signer = Azure::Storage::Core::Auth::SharedAccessSignature.new(storage_account_name, storage_access_key)
- @blobs = client.blob_client
- @container = container
- @path = path
- end
+module ActiveStorage
+ # Wraps the Microsoft Azure Storage Blob Service as a Active Storage service.
+ # See `ActiveStorage::Service` for the generic API documentation that applies to all services.
+ class Service::AzureStorageService < Service
+ attr_reader :client, :path, :blobs, :container, :signer
+
+ def initialize(path:, storage_account_name:, storage_access_key:, container:)
+ @client = Azure::Storage::Client.create(storage_account_name: storage_account_name, storage_access_key: storage_access_key)
+ @signer = Azure::Storage::Core::Auth::SharedAccessSignature.new(storage_account_name, storage_access_key)
+ @blobs = client.blob_client
+ @container = container
+ @path = path
+ end
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- blobs.create_block_blob(container, key, io, content_md5: checksum)
- rescue Azure::Core::Http::HTTPError => e
- raise ActiveStorage::IntegrityError
+ def upload(key, io, checksum: nil)
+ instrument :upload, key, checksum: checksum do
+ begin
+ blobs.create_block_blob(container, key, io, content_md5: checksum)
+ rescue Azure::Core::Http::HTTPError => e
+ raise ActiveStorage::IntegrityError
+ end
end
end
- end
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- stream(key, &block)
- end
- else
- instrument :download, key do
- _, io = blobs.get_blob(container, key)
- io.force_encoding(Encoding::BINARY)
+ def download(key)
+ if block_given?
+ instrument :streaming_download, key do
+ stream(key, &block)
+ end
+ else
+ instrument :download, key do
+ _, io = blobs.get_blob(container, key)
+ io.force_encoding(Encoding::BINARY)
+ end
end
end
- end
- def delete(key)
- instrument :delete, key do
- begin
- blobs.delete_blob(container, key)
- rescue Azure::Core::Http::HTTPError
- false
+ def delete(key)
+ instrument :delete, key do
+ begin
+ blobs.delete_blob(container, key)
+ rescue Azure::Core::Http::HTTPError
+ false
+ end
end
end
- end
- def exist?(key)
- instrument :exist, key do |payload|
- answer = blob_for(key).present?
- payload[:exist] = answer
- answer
+ def exist?(key)
+ instrument :exist, key do |payload|
+ answer = blob_for(key).present?
+ payload[:exist] = answer
+ answer
+ end
end
- end
- def url(key, expires_in:, disposition:, filename:)
- instrument :url, key do |payload|
- base_url = url_for(key)
- generated_url = signer.signed_uri(URI(base_url), false, permissions: "r",
- expiry: format_expiry(expires_in), content_disposition: "#{disposition}; filename=\"#{filename}\"").to_s
+ def url(key, expires_in:, disposition:, filename:)
+ instrument :url, key do |payload|
+ base_url = url_for(key)
+ generated_url = signer.signed_uri(URI(base_url), false, permissions: "r",
+ expiry: format_expiry(expires_in), content_disposition: "#{disposition}; filename=\"#{filename}\"").to_s
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- base_url = url_for(key)
- generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
- expiry: format_expiry(expires_in)).to_s
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ instrument :url, key do |payload|
+ base_url = url_for(key)
+ generated_url = signer.signed_uri(URI(base_url), false, permissions: "rw",
+ expiry: format_expiry(expires_in)).to_s
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
-
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum, "x-ms-blob-type" => "BlockBlob" }
- end
- private
- def url_for(key)
- "#{path}/#{container}/#{key}"
+ def headers_for_direct_upload(key, content_type:, checksum:, **)
+ { "Content-Type" => content_type, "Content-MD5" => checksum, "x-ms-blob-type" => "BlockBlob" }
end
- def blob_for(key)
- blobs.get_blob_properties(container, key)
- rescue Azure::Core::Http::HTTPError
- false
- end
+ private
+ def url_for(key)
+ "#{path}/#{container}/#{key}"
+ end
- def format_expiry(expires_in)
- expires_in ? Time.now.utc.advance(seconds: expires_in).iso8601 : nil
- end
+ def blob_for(key)
+ blobs.get_blob_properties(container, key)
+ rescue Azure::Core::Http::HTTPError
+ false
+ end
+
+ def format_expiry(expires_in)
+ expires_in ? Time.now.utc.advance(seconds: expires_in).iso8601 : nil
+ end
- # Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
- blob = blob_for(key)
+ # Reads the object for the given key in chunks, yielding each to the block.
+ def stream(key, options = {}, &block)
+ blob = blob_for(key)
- chunk_size = 5.megabytes
- offset = 0
+ chunk_size = 5.megabytes
+ offset = 0
- while offset < blob.properties[:content_length]
- _, io = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
- yield io
- offset += chunk_size
+ while offset < blob.properties[:content_length]
+ _, io = blobs.get_blob(container, key, start_range: offset, end_range: offset + chunk_size - 1)
+ yield io
+ offset += chunk_size
+ end
end
- end
+ end
end
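A configuration sketch for this service, expressed as the Ruby hash that `Service.configure` expects; the account name, key, and container are placeholders, and the keys mirror the keyword arguments of `initialize` above:

```ruby
configs = {
  microsoft: {
    service: "AzureStorage",
    path: "https://youraccount.blob.core.windows.net",  # hypothetical account URL
    storage_account_name: "youraccount",
    storage_access_key: "<access key>",
    container: "uploads"
  }
}

ActiveStorage::Blob.service = ActiveStorage::Service.configure(:microsoft, configs)
```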
diff --git a/activestorage/lib/active_storage/service/configurator.rb b/activestorage/lib/active_storage/service/configurator.rb
index a0afdaa912..5d6475a8ae 100644
--- a/activestorage/lib/active_storage/service/configurator.rb
+++ b/activestorage/lib/active_storage/service/configurator.rb
@@ -1,28 +1,30 @@
-class ActiveStorage::Service::Configurator #:nodoc:
- attr_reader :configurations
+module ActiveStorage
+ class Service::Configurator #:nodoc:
+ attr_reader :configurations
- def self.build(service_name, configurations)
- new(configurations).build(service_name)
- end
+ def self.build(service_name, configurations)
+ new(configurations).build(service_name)
+ end
- def initialize(configurations)
- @configurations = configurations.deep_symbolize_keys
- end
+ def initialize(configurations)
+ @configurations = configurations.deep_symbolize_keys
+ end
- def build(service_name)
- config = config_for(service_name.to_sym)
- resolve(config.fetch(:service)).build(**config, configurator: self)
- end
+ def build(service_name)
+ config = config_for(service_name.to_sym)
+ resolve(config.fetch(:service)).build(**config, configurator: self)
+ end
- private
- def config_for(name)
- configurations.fetch name do
- raise "Missing configuration for the #{name.inspect} Active Storage service. Configurations available for #{configurations.keys.inspect}"
+ private
+ def config_for(name)
+ configurations.fetch name do
+ raise "Missing configuration for the #{name.inspect} Active Storage service. Configurations available for #{configurations.keys.inspect}"
+ end
end
- end
- def resolve(class_name)
- require "active_storage/service/#{class_name.to_s.underscore}_service"
- ActiveStorage::Service.const_get(:"#{class_name}Service")
- end
+ def resolve(class_name)
+ require "active_storage/service/#{class_name.to_s.underscore}_service"
+ ActiveStorage::Service.const_get(:"#{class_name}Service")
+ end
+ end
end
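A short sketch of the configurator's contract, using a hypothetical Disk configuration. The `service` key names the class to resolve; everything else is forwarded to that service's `build`/`new`:

```ruby
configs = { local: { service: "Disk", root: "/tmp/storage" } }  # hypothetical

service = ActiveStorage::Service::Configurator.build(:local, configs)
service.class  # => ActiveStorage::Service::DiskService

# An unknown name raises with the available keys:
ActiveStorage::Service::Configurator.build(:missing, configs)
# => RuntimeError: Missing configuration for the :missing Active Storage service. ...
```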
diff --git a/activestorage/lib/active_storage/service/disk_service.rb b/activestorage/lib/active_storage/service/disk_service.rb
index 35b0909297..3d92102cf0 100644
--- a/activestorage/lib/active_storage/service/disk_service.rb
+++ b/activestorage/lib/active_storage/service/disk_service.rb
@@ -3,122 +3,125 @@ require "pathname"
require "digest/md5"
require "active_support/core_ext/numeric/bytes"
-# Wraps a local disk path as a Active Storage service. See `ActiveStorage::Service` for the generic API
-# documentation that applies to all services.
-class ActiveStorage::Service::DiskService < ActiveStorage::Service
- attr_reader :root
-
- def initialize(root:)
- @root = root
- end
+module ActiveStorage
+ # Wraps a local disk path as a Active Storage service. See `ActiveStorage::Service` for the generic API
+ # documentation that applies to all services.
+ class Service::DiskService < Service
+ attr_reader :root
+
+ def initialize(root:)
+ @root = root
+ end
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- IO.copy_stream(io, make_path_for(key))
- ensure_integrity_of(key, checksum) if checksum
+ def upload(key, io, checksum: nil)
+ instrument :upload, key, checksum: checksum do
+ IO.copy_stream(io, make_path_for(key))
+ ensure_integrity_of(key, checksum) if checksum
+ end
end
- end
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- File.open(path_for(key), "rb") do |file|
- while data = file.read(64.kilobytes)
- yield data
+ def download(key)
+ if block_given?
+ instrument :streaming_download, key do
+ File.open(path_for(key), "rb") do |file|
+ while data = file.read(64.kilobytes)
+ yield data
+ end
end
end
- end
- else
- instrument :download, key do
- File.binread path_for(key)
+ else
+ instrument :download, key do
+ File.binread path_for(key)
+ end
end
end
- end
- def delete(key)
- instrument :delete, key do
- begin
- File.delete path_for(key)
- rescue Errno::ENOENT
- # Ignore files already deleted
+ def delete(key)
+ instrument :delete, key do
+ begin
+ File.delete path_for(key)
+ rescue Errno::ENOENT
+ # Ignore files already deleted
+ end
end
end
- end
- def exist?(key)
- instrument :exist, key do |payload|
- answer = File.exist? path_for(key)
- payload[:exist] = answer
- answer
+ def exist?(key)
+ instrument :exist, key do |payload|
+ answer = File.exist? path_for(key)
+ payload[:exist] = answer
+ answer
+ end
end
- end
- def url(key, expires_in:, disposition:, filename:, content_type:)
- instrument :url, key do |payload|
- verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
-
- generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.rails_disk_service_path \
- verified_key_with_expiration,
- disposition: disposition, filename: filename, content_type: content_type
- else
- "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?disposition=#{disposition}&content_type=#{content_type}"
- end
+ def url(key, expires_in:, disposition:, filename:, content_type:)
+ instrument :url, key do |payload|
+ verified_key_with_expiration = ActiveStorage.verifier.generate(key, expires_in: expires_in, purpose: :blob_key)
+
+ generated_url =
+ if defined?(Rails.application)
+ Rails.application.routes.url_helpers.rails_disk_service_path \
+ verified_key_with_expiration,
+ disposition: disposition, filename: filename, content_type: content_type
+ else
+ "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?disposition=#{disposition}&content_type=#{content_type}"
+ end
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- verified_token_with_expiration = ActiveStorage.verifier.generate(
- {
- key: key,
- content_type: content_type,
- content_length: content_length,
- checksum: checksum
- },
- expires_in: expires_in,
- purpose: :blob_token
- )
-
- generated_url =
- if defined?(Rails.application)
- Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
- else
- "/rails/active_storage/disk/#{verified_token_with_expiration}"
- end
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ instrument :url, key do |payload|
+ verified_token_with_expiration = ActiveStorage.verifier.generate(
+ {
+ key: key,
+ content_type: content_type,
+ content_length: content_length,
+ checksum: checksum
+ },
+ expires_in: expires_in,
+ purpose: :blob_token
+ )
+
+ generated_url =
+ if defined?(Rails.application)
+ Rails.application.routes.url_helpers.update_rails_disk_service_path verified_token_with_expiration
+ else
+ "/rails/active_storage/disk/#{verified_token_with_expiration}"
+ end
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def headers_for_direct_upload(key, content_type:, **)
- { "Content-Type" => content_type }
- end
-
- private
- def path_for(key)
- File.join root, folder_for(key), key
+ def headers_for_direct_upload(key, content_type:, **)
+ { "Content-Type" => content_type }
end
- def folder_for(key)
- [ key[0..1], key[2..3] ].join("/")
- end
+ private
+ def path_for(key)
+ File.join root, folder_for(key), key
+ end
- def make_path_for(key)
- path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
- end
+ def folder_for(key)
+ [ key[0..1], key[2..3] ].join("/")
+ end
- def ensure_integrity_of(key, checksum)
- unless Digest::MD5.file(path_for(key)).base64digest == checksum
- delete key
- raise ActiveStorage::IntegrityError
+ def make_path_for(key)
+ path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
end
- end
+
+ def ensure_integrity_of(key, checksum)
+ unless Digest::MD5.file(path_for(key)).base64digest == checksum
+ delete key
+ raise ActiveStorage::IntegrityError
+ end
+ end
+ end
end
+
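A usage sketch for the disk service with a hypothetical root and key, following the `path_for`/`folder_for` helpers above:

```ruby
disk = ActiveStorage::Service.configure(
  :local,
  local: { service: "Disk", root: "/tmp/storage" }  # hypothetical root
)

disk.upload("abc123", File.open("/tmp/face.jpg"))
disk.exist?("abc123")  # => true
# Files are sharded by the first four characters of the key,
# e.g. /tmp/storage/ab/c1/abc123
```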
diff --git a/activestorage/lib/active_storage/service/gcs_service.rb b/activestorage/lib/active_storage/service/gcs_service.rb
index 73629f7486..ea4ec5a790 100644
--- a/activestorage/lib/active_storage/service/gcs_service.rb
+++ b/activestorage/lib/active_storage/service/gcs_service.rb
@@ -1,79 +1,81 @@
require "google/cloud/storage"
require "active_support/core_ext/object/to_query"
-# Wraps the Google Cloud Storage as a Active Storage service. See `ActiveStorage::Service` for the generic API
-# documentation that applies to all services.
-class ActiveStorage::Service::GCSService < ActiveStorage::Service
- attr_reader :client, :bucket
+module ActiveStorage
+ # Wraps the Google Cloud Storage as a Active Storage service. See `ActiveStorage::Service` for the generic API
+ # documentation that applies to all services.
+ class Service::GCSService < Service
+ attr_reader :client, :bucket
- def initialize(project:, keyfile:, bucket:)
- @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
- @bucket = @client.bucket(bucket)
- end
+ def initialize(project:, keyfile:, bucket:)
+ @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
+ @bucket = @client.bucket(bucket)
+ end
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- bucket.create_file(io, key, md5: checksum)
- rescue Google::Cloud::InvalidArgumentError
- raise ActiveStorage::IntegrityError
+ def upload(key, io, checksum: nil)
+ instrument :upload, key, checksum: checksum do
+ begin
+ bucket.create_file(io, key, md5: checksum)
+ rescue Google::Cloud::InvalidArgumentError
+ raise ActiveStorage::IntegrityError
+ end
end
end
- end
- # FIXME: Add streaming when given a block
- def download(key)
- instrument :download, key do
- io = file_for(key).download
- io.rewind
- io.read
+ # FIXME: Add streaming when given a block
+ def download(key)
+ instrument :download, key do
+ io = file_for(key).download
+ io.rewind
+ io.read
+ end
end
- end
- def delete(key)
- instrument :delete, key do
- file_for(key).try(:delete)
+ def delete(key)
+ instrument :delete, key do
+ file_for(key).try(:delete)
+ end
end
- end
- def exist?(key)
- instrument :exist, key do |payload|
- answer = file_for(key).present?
- payload[:exist] = answer
- answer
+ def exist?(key)
+ instrument :exist, key do |payload|
+ answer = file_for(key).present?
+ payload[:exist] = answer
+ answer
+ end
end
- end
- def url(key, expires_in:, disposition:, filename:, content_type:)
- instrument :url, key do |payload|
- generated_url = file_for(key).signed_url expires: expires_in, query: {
- "response-content-disposition" => "#{disposition}; filename=\"#{filename}\"",
- "response-content-type" => content_type
- }
+ def url(key, expires_in:, disposition:, filename:, content_type:)
+ instrument :url, key do |payload|
+ generated_url = file_for(key).signed_url expires: expires_in, query: {
+ "response-content-disposition" => "#{disposition}; filename=\"#{filename}\"",
+ "response-content-type" => content_type
+ }
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
- content_type: content_type, content_md5: checksum
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ instrument :url, key do |payload|
+ generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
+ content_type: content_type, content_md5: checksum
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum }
- end
-
- private
- def file_for(key)
- bucket.file(key)
+ def headers_for_direct_upload(key, content_type:, checksum:, **)
+ { "Content-Type" => content_type, "Content-MD5" => checksum }
end
+
+ private
+ def file_for(key)
+ bucket.file(key)
+ end
+ end
end
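A configuration sketch for the GCS service; project, keyfile path, and bucket are placeholders matching the keyword arguments of `initialize` above:

```ruby
configs = {
  google: {
    service: "GCS",
    project: "my-gcp-project",            # hypothetical values
    keyfile: "/path/to/keyfile.json",
    bucket: "my-bucket"
  }
}

ActiveStorage::Blob.service = ActiveStorage::Service.configure(:google, configs)
```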
diff --git a/activestorage/lib/active_storage/service/mirror_service.rb b/activestorage/lib/active_storage/service/mirror_service.rb
index 7c407f2730..2403eeb1e9 100644
--- a/activestorage/lib/active_storage/service/mirror_service.rb
+++ b/activestorage/lib/active_storage/service/mirror_service.rb
@@ -1,46 +1,48 @@
require "active_support/core_ext/module/delegation"
-# Wraps a set of mirror services and provides a single `ActiveStorage::Service` object that will all
-# have the files uploaded to them. A `primary` service is designated to answer calls to `download`, `exists?`,
-# and `url`.
-class ActiveStorage::Service::MirrorService < ActiveStorage::Service
- attr_reader :primary, :mirrors
-
- delegate :download, :exist?, :url, to: :primary
-
- # Stitch together from named services.
- def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
- new \
- primary: configurator.build(primary),
- mirrors: mirrors.collect { |name| configurator.build name }
- end
-
- def initialize(primary:, mirrors:)
- @primary, @mirrors = primary, mirrors
- end
-
- # Upload the `io` to the `key` specified to all services. If a `checksum` is provided, all services will
- # ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
- def upload(key, io, checksum: nil)
- each_service.collect do |service|
- service.upload key, io.tap(&:rewind), checksum: checksum
+module ActiveStorage
+ # Wraps a set of mirror services and provides a single `ActiveStorage::Service` object that will all
+ # have the files uploaded to them. A `primary` service is designated to answer calls to `download`, `exists?`,
+ # and `url`.
+ class Service::MirrorService < Service
+ attr_reader :primary, :mirrors
+
+ delegate :download, :exist?, :url, to: :primary
+
+ # Stitch together from named services.
+ def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
+ new \
+ primary: configurator.build(primary),
+ mirrors: mirrors.collect { |name| configurator.build name }
end
- end
-
- # Delete the file at the `key` on all services.
- def delete(key)
- perform_across_services :delete, key
- end
- private
- def each_service(&block)
- [ primary, *mirrors ].each(&block)
+ def initialize(primary:, mirrors:)
+ @primary, @mirrors = primary, mirrors
end
- def perform_across_services(method, *args)
- # FIXME: Convert to be threaded
+ # Upload the `io` to the `key` specified to all services. If a `checksum` is provided, all services will
+ # ensure a match when the upload has completed or raise an `ActiveStorage::IntegrityError`.
+ def upload(key, io, checksum: nil)
each_service.collect do |service|
- service.public_send method, *args
+ service.upload key, io.tap(&:rewind), checksum: checksum
end
end
+
+ # Delete the file at the `key` on all services.
+ def delete(key)
+ perform_across_services :delete, key
+ end
+
+ private
+ def each_service(&block)
+ [ primary, *mirrors ].each(&block)
+ end
+
+ def perform_across_services(method, *args)
+ # FIXME: Convert to be threaded
+ each_service.collect do |service|
+ service.public_send method, *args
+ end
+ end
+ end
end
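A sketch of how a mirror might be configured from named services, per `MirrorService.build` above; all names and credentials are placeholders:

```ruby
configs = {
  local:  { service: "Disk", root: "/tmp/storage" },
  amazon: { service: "S3",
            access_key_id: "<key id>", secret_access_key: "<secret>",
            region: "us-east-1", bucket: "my-bucket" },
  mirror: { service: "Mirror", primary: :local, mirrors: [ :amazon ] }
}

# Uploads and deletes fan out to every service; download, exist? and url
# are delegated to the primary alone.
ActiveStorage::Blob.service = ActiveStorage::Service.configure(:mirror, configs)
```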
diff --git a/activestorage/lib/active_storage/service/s3_service.rb b/activestorage/lib/active_storage/service/s3_service.rb
index ca461c2994..5153f5db0d 100644
--- a/activestorage/lib/active_storage/service/s3_service.rb
+++ b/activestorage/lib/active_storage/service/s3_service.rb
@@ -1,96 +1,98 @@
require "aws-sdk"
require "active_support/core_ext/numeric/bytes"
-# Wraps the Amazon Simple Storage Service (S3) as a Active Storage service.
-# See `ActiveStorage::Service` for the generic API documentation that applies to all services.
-class ActiveStorage::Service::S3Service < ActiveStorage::Service
- attr_reader :client, :bucket, :upload_options
+module ActiveStorage
+ # Wraps the Amazon Simple Storage Service (S3) as a Active Storage service.
+ # See `ActiveStorage::Service` for the generic API documentation that applies to all services.
+ class Service::S3Service < Service
+ attr_reader :client, :bucket, :upload_options
- def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
- @bucket = @client.bucket(bucket)
+ def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
+ @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
+ @bucket = @client.bucket(bucket)
- @upload_options = upload
- end
+ @upload_options = upload
+ end
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
- rescue Aws::S3::Errors::BadDigest
- raise ActiveStorage::IntegrityError
+ def upload(key, io, checksum: nil)
+ instrument :upload, key, checksum: checksum do
+ begin
+ object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
+ rescue Aws::S3::Errors::BadDigest
+ raise ActiveStorage::IntegrityError
+ end
end
end
- end
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- stream(key, &block)
- end
- else
- instrument :download, key do
- object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ def download(key)
+ if block_given?
+ instrument :streaming_download, key do
+ stream(key, &block)
+ end
+ else
+ instrument :download, key do
+ object_for(key).get.body.read.force_encoding(Encoding::BINARY)
+ end
end
end
- end
- def delete(key)
- instrument :delete, key do
- object_for(key).delete
+ def delete(key)
+ instrument :delete, key do
+ object_for(key).delete
+ end
end
- end
- def exist?(key)
- instrument :exist, key do |payload|
- answer = object_for(key).exists?
- payload[:exist] = answer
- answer
+ def exist?(key)
+ instrument :exist, key do |payload|
+ answer = object_for(key).exists?
+ payload[:exist] = answer
+ answer
+ end
end
- end
- def url(key, expires_in:, disposition:, filename:, content_type:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
- response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
- response_content_type: content_type
+ def url(key, expires_in:, disposition:, filename:, content_type:)
+ instrument :url, key do |payload|
+ generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
+ response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
+ response_content_type: content_type
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
- content_type: content_type, content_length: content_length, content_md5: checksum
+ def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+ instrument :url, key do |payload|
+ generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
+ content_type: content_type, content_length: content_length, content_md5: checksum
- payload[:url] = generated_url
+ payload[:url] = generated_url
- generated_url
+ generated_url
+ end
end
- end
-
- def headers_for_direct_upload(key, content_type:, checksum:, **)
- { "Content-Type" => content_type, "Content-MD5" => checksum }
- end
- private
- def object_for(key)
- bucket.object(key)
+ def headers_for_direct_upload(key, content_type:, checksum:, **)
+ { "Content-Type" => content_type, "Content-MD5" => checksum }
end
- # Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
- object = object_for(key)
+ private
+ def object_for(key)
+ bucket.object(key)
+ end
+
+ # Reads the object for the given key in chunks, yielding each to the block.
+ def stream(key, options = {}, &block)
+ object = object_for(key)
- chunk_size = 5.megabytes
- offset = 0
+ chunk_size = 5.megabytes
+ offset = 0
- while offset < object.content_length
- yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
- offset += chunk_size
+ while offset < object.content_length
+ yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
+ offset += chunk_size
+ end
end
- end
+ end
end
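Finally, a configuration sketch for the S3 service. Credentials and bucket are placeholders; the optional `upload` hash is merged into every `put` call by `upload` above, shown here with a server-side encryption option as one plausible use:

```ruby
configs = {
  amazon: {
    service: "S3",
    access_key_id: "<key id>",        # hypothetical credentials
    secret_access_key: "<secret>",
    region: "us-east-1",
    bucket: "my-bucket",
    upload: { server_side_encryption: "AES256" }  # merged into object_for(key).put
  }
}

ActiveStorage::Blob.service = ActiveStorage::Service.configure(:amazon, configs)
```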