diff options
author | David Heinemeier Hansson <david@loudthinking.com> | 2017-07-04 16:44:50 +0200 |
---|---|---|
committer | David Heinemeier Hansson <david@loudthinking.com> | 2017-07-04 16:44:50 +0200 |
commit | efd950ae706cfbb55dffebd5d0c85e30acfd7a45 (patch) | |
tree | 49830af75a1fc0a0ec2c634f060fb984908a7e38 /lib/active_file/site | |
parent | ccaba581c0cf8653f61ce212667eaa1cc6f0a28e (diff) | |
download | rails-efd950ae706cfbb55dffebd5d0c85e30acfd7a45.tar.gz rails-efd950ae706cfbb55dffebd5d0c85e30acfd7a45.tar.bz2 rails-efd950ae706cfbb55dffebd5d0c85e30acfd7a45.zip |
Use lazy-loaded factory method for site configuration
Diffstat (limited to 'lib/active_file/site')
-rw-r--r-- | lib/active_file/site/disk_site.rb | 72 | ||||
-rw-r--r-- | lib/active_file/site/gcs_site.rb | 47 | ||||
-rw-r--r-- | lib/active_file/site/mirror_site.rb | 44 | ||||
-rw-r--r-- | lib/active_file/site/s3_site.rb | 62 |
4 files changed, 225 insertions, 0 deletions
# frozen_string_literal: true

# ---------------------------------------------------------------------------
# lib/active_file/site/disk_site.rb
# ---------------------------------------------------------------------------
require "fileutils"
require "pathname"

# Stores blobs as plain files on the local filesystem, sharded into a
# two-level directory tree derived from the first four characters of the key.
class ActiveFile::Site::DiskSite < ActiveFile::Site
  attr_reader :root

  # root: base directory under which all blobs are written.
  def initialize(root:)
    @root = root
  end


  # Streams the IO to disk in 64 KB chunks, creating parent folders as needed.
  def upload(key, io)
    File.open(make_path_for(key), "wb") do |file|
      while chunk = io.read(65536)
        file.write(chunk)
      end
    end
  end

  # Returns the blob's full contents, or yields it in 64 KB chunks when a
  # block is given.
  def download(key)
    if block_given?
      File.open(path_for(key)) do |file|
        while data = file.read(65536)
          yield data
        end
      end
    else
      File.open path_for(key), &:read
    end
  end

  # Removes the file for key, ignoring files already deleted.
  #
  # FIX: the original used the rescue *modifier*
  # (`File.delete path_for(key) rescue Errno::ENOENT`), which swallows every
  # StandardError — not just ENOENT — and simply evaluates to the
  # Errno::ENOENT class. A proper rescue clause restricts it to missing files.
  def delete(key)
    File.delete path_for(key)
  rescue Errno::ENOENT
    # Ignore files already deleted
  end

  def exist?(key)
    File.exist? path_for(key)
  end


  # Returns a URL for the blob: the Rails route helper when Rails is loaded,
  # otherwise the raw verified-key path.
  def url(key, disposition:, expires_in: nil)
    verified_key_with_expiration = ActiveFile::VerifiedKeyWithExpiration.encode(key, expires_in: expires_in)

    if defined?(Rails)
      Rails.application.routes.url_helpers.rails_disk_blob_path(verified_key_with_expiration)
    else
      "/rails/blobs/#{verified_key_with_expiration}"
    end
  end

  def byte_size(key)
    File.size path_for(key)
  end

  # Hex MD5 digest of the stored file.
  def checksum(key)
    Digest::MD5.file(path_for(key)).hexdigest
  end


  private
    def path_for(key)
      File.join root, folder_for(key), key
    end

    # e.g. key "abcdef…" → "ab/cd"
    def folder_for(key)
      [ key[0..1], key[2..3] ].join("/")
    end

    # Like path_for, but ensures the parent directories exist.
    def make_path_for(key)
      path_for(key).tap { |path| FileUtils.mkdir_p File.dirname(path) }
    end
end

# ---------------------------------------------------------------------------
# lib/active_file/site/gcs_site.rb
# ---------------------------------------------------------------------------
require "google/cloud/storage"

# Stores blobs in a Google Cloud Storage bucket.
class ActiveFile::Site::GCSSite < ActiveFile::Site
  attr_reader :client, :bucket

  def initialize(project:, keyfile:, bucket:)
    @client = Google::Cloud::Storage.new(project: project, keyfile: keyfile)
    @bucket = @client.bucket(bucket)
  end

  def upload(key, io)
    bucket.create_file(io, key)
  end

  def download(key)
    io = file_for(key).download
    io.rewind
    io.read
  end

  # No-op when the file is already gone (file_for returns nil).
  def delete(key)
    file_for(key).try(:delete)
  end

  def exist?(key)
    file_for(key).present?
  end


  def byte_size(key)
    file_for(key).size
  end

  # GCS reports MD5 in base64; convert to the hex form used by other sites.
  def checksum(key)
    convert_to_hex base64: file_for(key).md5
  end


  private
    def file_for(key)
      bucket.file(key)
    end

    # base64 → raw bytes → lowercase hex string.
    def convert_to_hex(base64:)
      base64.unpack("m0").first.unpack("H*").first
    end
end

# ---------------------------------------------------------------------------
# lib/active_file/site/mirror_site.rb
# ---------------------------------------------------------------------------

# Fans write operations out to a list of sites; reads are served by the
# first site that has the blob. The first site in the list is the primary.
class ActiveFile::Site::MirrorSite < ActiveFile::Site
  attr_reader :sites

  def initialize(sites:)
    @sites = sites
  end

  def upload(key, io)
    perform_across_sites :upload, key, io
  end

  def download(key)
    sites.detect { |site| site.exist?(key) }.download(key)
  end

  def delete(key)
    perform_across_sites :delete, key
  end

  # True when at least one mirror has the blob.
  def exist?(key)
    perform_across_sites(:exist?, key).any?
  end


  def byte_size(key)
    primary_site.byte_size(key)
  end

  def checksum(key)
    primary_site.checksum(key)
  end

  private
    def primary_site
      sites.first
    end

    # Invokes method(*args) on every site and collects the results.
    #
    # FIX: the original declared `**args` (keyword splat), but every caller
    # passes positional arguments (`:upload, key, io` / `:delete, key`), so
    # the call raised ArgumentError. A positional splat matches the call sites.
    def perform_across_sites(method, *args)
      # FIXME: Convert to be threaded
      sites.collect do |site|
        site.send method, *args
      end
    end
end

# ---------------------------------------------------------------------------
# lib/active_file/site/s3_site.rb
# ---------------------------------------------------------------------------
require "aws-sdk"

# Stores blobs in an Amazon S3 bucket.
class ActiveFile::Site::S3Site < ActiveFile::Site
  attr_reader :client, :bucket

  def initialize(access_key_id:, secret_access_key:, region:, bucket:)
    @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region)
    @bucket = @client.bucket(bucket)
  end

  def upload(key, io)
    object_for(key).put(body: io)
  end

  # Returns the blob's full contents, or streams it in chunks when a block
  # is given.
  #
  # FIX: the original forwarded `&block` without ever declaring it
  # (`def download(key)` ... `stream(key, &block)`), raising NameError
  # whenever a block was passed. Declaring the block parameter is a
  # backward-compatible signature extension.
  def download(key, &block)
    if block_given?
      stream(key, &block)
    else
      object_for(key).get.body.read
    end
  end

  def delete(key)
    object_for(key).delete
  end

  def exist?(key)
    object_for(key).exists?
  end


  def url(key, disposition: :inline, expires_in: nil)
    object_for(key).presigned_url(:get, expires_in: expires_in)
  end

  def byte_size(key)
    object_for(key).size
  end

  # S3's ETag is the MD5 hex digest wrapped in quotes (for non-multipart
  # uploads); strip the quotes to match the other sites.
  def checksum(key)
    object_for(key).etag.remove(/"/)
  end


  private
    def object_for(key)
      bucket.object(key)
    end

    # Reads the object for the given key in chunks, yielding each to the block.
    #
    # FIX: Aws::S3::Object has no #read — ranged reads go through
    # #get(range: ...), whose body is then read out.
    def stream(key, options = {}, &block)
      object = object_for(key)

      chunk_size = 5_242_880 # 5 megabytes
      offset = 0

      while offset < object.content_length
        yield object.get(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}")).body.read
        offset += chunk_size
      end
    end
end