aboutsummaryrefslogtreecommitdiffstats
path: root/lib/active_storage/service
diff options
context:
space:
mode:
authorDavid Heinemeier Hansson <david@loudthinking.com>2017-07-22 09:47:24 -0500
committerDavid Heinemeier Hansson <david@loudthinking.com>2017-07-22 09:47:24 -0500
commitd50679f4eefde1aca1ab71ba3c0109739cfdff3f (patch)
treeac9034fe7c4aa64cd5e90ecebc346d478917387c /lib/active_storage/service
parent5b7c31c23a708de77b3d73b68aec0ba99c8be861 (diff)
downloadrails-d50679f4eefde1aca1ab71ba3c0109739cfdff3f.tar.gz
rails-d50679f4eefde1aca1ab71ba3c0109739cfdff3f.tar.bz2
rails-d50679f4eefde1aca1ab71ba3c0109739cfdff3f.zip
Move models and jobs to the app setup
Follow engine conventions more closely
Diffstat (limited to 'lib/active_storage/service')
-rw-r--r--lib/active_storage/service/configurator.rb28
-rw-r--r--lib/active_storage/service/disk_service.rb89
-rw-r--r--lib/active_storage/service/gcs_service.rb71
-rw-r--r--lib/active_storage/service/mirror_service.rb40
-rw-r--r--lib/active_storage/service/s3_service.rb89
5 files changed, 0 insertions, 317 deletions
diff --git a/lib/active_storage/service/configurator.rb b/lib/active_storage/service/configurator.rb
deleted file mode 100644
index 00ae24d251..0000000000
--- a/lib/active_storage/service/configurator.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-class ActiveStorage::Service::Configurator #:nodoc:
- attr_reader :configurations
-
- def self.build(service_name, configurations)
- new(configurations).build(service_name)
- end
-
- def initialize(configurations)
- @configurations = configurations.deep_symbolize_keys
- end
-
- def build(service_name)
- config = config_for(service_name.to_sym)
- resolve(config.fetch(:service)).build(**config, configurator: self)
- end
-
- private
- def config_for(name)
- configurations.fetch name do
- raise "Missing configuration for the #{name.inspect} Active Storage service. Configurations available for #{configurations.keys.inspect}"
- end
- end
-
- def resolve(class_name)
- require "active_storage/service/#{class_name.to_s.downcase}_service"
- ActiveStorage::Service.const_get(:"#{class_name}Service")
- end
-end
diff --git a/lib/active_storage/service/disk_service.rb b/lib/active_storage/service/disk_service.rb
deleted file mode 100644
index a2a27528c1..0000000000
--- a/lib/active_storage/service/disk_service.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-require "fileutils"
-require "pathname"
-require "digest/md5"
-require "active_support/core_ext/numeric/bytes"
-
-class ActiveStorage::Service::DiskService < ActiveStorage::Service
- attr_reader :root
-
- def initialize(root:)
- @root = root
- end
-
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- IO.copy_stream(io, make_path_for(key))
- ensure_integrity_of(key, checksum) if checksum
- end
- end
-
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- File.open(path_for(key), "rb") do |file|
- while data = file.read(64.kilobytes)
- yield data
- end
- end
- end
- else
- instrument :download, key do
- File.binread path_for(key)
- end
- end
- end
-
- def delete(key)
- instrument :delete, key do
- begin
- File.delete path_for(key)
- rescue Errno::ENOENT
- # Ignore files already deleted
- end
- end
- end
-
- def exist?(key)
- instrument :exist, key do |payload|
- answer = File.exist? path_for(key)
- payload[:exist] = answer
- answer
- end
- end
-
- def url(key, expires_in:, disposition:, filename:)
- instrument :url, key do |payload|
- verified_key_with_expiration = ActiveStorage::VerifiedKeyWithExpiration.encode(key, expires_in: expires_in)
-
- generated_url =
- if defined?(Rails) && defined?(Rails.application)
- Rails.application.routes.url_helpers.rails_disk_blob_path(verified_key_with_expiration, disposition: disposition, filename: filename)
- else
-          "/rails/active_storage/disk/#{verified_key_with_expiration}/#{filename}?disposition=#{disposition}"
- end
-
- payload[:url] = generated_url
-
- generated_url
- end
- end
-
- private
- def path_for(key)
- File.join root, folder_for(key), key
- end
-
- def folder_for(key)
- [ key[0..1], key[2..3] ].join("/")
- end
-
- def make_path_for(key)
- path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
- end
-
- def ensure_integrity_of(key, checksum)
- unless Digest::MD5.file(path_for(key)).base64digest == checksum
- raise ActiveStorage::IntegrityError
- end
- end
-end
diff --git a/lib/active_storage/service/gcs_service.rb b/lib/active_storage/service/gcs_service.rb
deleted file mode 100644
index 7053a130c0..0000000000
--- a/lib/active_storage/service/gcs_service.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-require "google/cloud/storage"
-require "active_support/core_ext/object/to_query"
-
-class ActiveStorage::Service::GCSService < ActiveStorage::Service
- attr_reader :client, :bucket
-
- def initialize(project:, keyfile:, bucket:)
- @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
- @bucket = @client.bucket(bucket)
- end
-
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- bucket.create_file(io, key, md5: checksum)
- rescue Google::Cloud::InvalidArgumentError
- raise ActiveStorage::IntegrityError
- end
- end
- end
-
- # FIXME: Add streaming when given a block
- def download(key)
- instrument :download, key do
- io = file_for(key).download
- io.rewind
- io.read
- end
- end
-
- def delete(key)
- instrument :delete, key do
- file_for(key)&.delete
- end
- end
-
- def exist?(key)
- instrument :exist, key do |payload|
- answer = file_for(key).present?
- payload[:exist] = answer
- answer
- end
- end
-
- def url(key, expires_in:, disposition:, filename:)
- instrument :url, key do |payload|
-      query = { "response-content-disposition" => "#{disposition}; filename=\"#{filename}\"" }
- generated_url = file_for(key).signed_url(expires: expires_in, query: query)
-
- payload[:url] = generated_url
-
- generated_url
- end
- end
-
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:)
- instrument :url, key do |payload|
- generated_url = bucket.signed_url key, method: "PUT", expires: expires_in,
- content_type: content_type
-
- payload[:url] = generated_url
-
- generated_url
- end
- end
-
- private
- def file_for(key)
- bucket.file(key)
- end
-end
diff --git a/lib/active_storage/service/mirror_service.rb b/lib/active_storage/service/mirror_service.rb
deleted file mode 100644
index 54465cad05..0000000000
--- a/lib/active_storage/service/mirror_service.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-require "active_support/core_ext/module/delegation"
-
-class ActiveStorage::Service::MirrorService < ActiveStorage::Service
- attr_reader :primary, :mirrors
-
- delegate :download, :exist?, :url, to: :primary
-
- # Stitch together from named services.
- def self.build(primary:, mirrors:, configurator:, **options) #:nodoc:
- new \
- primary: configurator.build(primary),
- mirrors: mirrors.collect { |name| configurator.build name }
- end
-
- def initialize(primary:, mirrors:)
- @primary, @mirrors = primary, mirrors
- end
-
- def upload(key, io, checksum: nil)
- each_service.collect do |service|
- service.upload key, io.tap(&:rewind), checksum: checksum
- end
- end
-
- def delete(key)
- perform_across_services :delete, key
- end
-
- private
- def each_service(&block)
- [ primary, *mirrors ].each(&block)
- end
-
- def perform_across_services(method, *args)
- # FIXME: Convert to be threaded
- each_service.collect do |service|
- service.public_send method, *args
- end
- end
-end
diff --git a/lib/active_storage/service/s3_service.rb b/lib/active_storage/service/s3_service.rb
deleted file mode 100644
index efffdec157..0000000000
--- a/lib/active_storage/service/s3_service.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-require "aws-sdk"
-require "active_support/core_ext/numeric/bytes"
-
-class ActiveStorage::Service::S3Service < ActiveStorage::Service
- attr_reader :client, :bucket, :upload_options
-
- def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
- @bucket = @client.bucket(bucket)
-
- @upload_options = upload
- end
-
- def upload(key, io, checksum: nil)
- instrument :upload, key, checksum: checksum do
- begin
- object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
- rescue Aws::S3::Errors::BadDigest
- raise ActiveStorage::IntegrityError
- end
- end
- end
-
- def download(key)
- if block_given?
- instrument :streaming_download, key do
- stream(key, &block)
- end
- else
- instrument :download, key do
- object_for(key).get.body.read.force_encoding(Encoding::BINARY)
- end
- end
- end
-
- def delete(key)
- instrument :delete, key do
- object_for(key).delete
- end
- end
-
- def exist?(key)
- instrument :exist, key do |payload|
- answer = object_for(key).exists?
- payload[:exist] = answer
- answer
- end
- end
-
- def url(key, expires_in:, disposition:, filename:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
-        response_content_disposition: "#{disposition}; filename=\"#{filename}\""
-
- payload[:url] = generated_url
-
- generated_url
- end
- end
-
- def url_for_direct_upload(key, expires_in:, content_type:, content_length:)
- instrument :url, key do |payload|
- generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
- content_type: content_type, content_length: content_length
-
- payload[:url] = generated_url
-
- generated_url
- end
- end
-
- private
- def object_for(key)
- bucket.object(key)
- end
-
- # Reads the object for the given key in chunks, yielding each to the block.
- def stream(key, options = {}, &block)
- object = object_for(key)
-
- chunk_size = 5.megabytes
- offset = 0
-
- while offset < object.content_length
- yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
- offset += chunk_size
- end
- end
-end