require "aws-sdk"
require "active_support/core_ext/numeric/bytes"
class ActiveStorage::Service::S3Service < ActiveStorage::Service
  attr_reader :client, :bucket, :upload_options

  def initialize(access_key_id:, secret_access_key:, region:, bucket:, upload: {}, **options)
    @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region, **options)
    @bucket = @client.bucket(bucket)

    @upload_options = upload
  end
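
  # Uploads the io to the object at key. When a checksum is given, S3 verifies it
  # against the uploaded body; a mismatch raises ActiveStorage::IntegrityError.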
  def upload(key, io, checksum: nil)
    instrument :upload, key, checksum: checksum do
      begin
        object_for(key).put(upload_options.merge(body: io, content_md5: checksum))
      rescue Aws::S3::Errors::BadDigest
        raise ActiveStorage::IntegrityError
      end
    end
  end
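
  # Downloads the object at key. With a block, streams it in chunks; without,
  # returns the entire object as a binary-encoded string.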
  def download(key, &block)
    if block_given?
      instrument :streaming_download, key do
        stream(key, &block)
      end
    else
      instrument :download, key do
        object_for(key).get.body.read.force_encoding(Encoding::BINARY)
      end
    end
  end
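
  # Deletes the object at key.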
  def delete(key)
    instrument :delete, key do
      object_for(key).delete
    end
  end
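
  # Returns true if an object exists at key.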
  def exist?(key)
    instrument :exist, key do |payload|
      answer = object_for(key).exists?
      payload[:exist] = answer
      answer
    end
  end
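
  # Returns a signed GET URL for the object at key that expires in expires_in
  # seconds and carries the given content disposition, filename, and content type.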
  def url(key, expires_in:, disposition:, filename:, content_type:)
    instrument :url, key do |payload|
      generated_url = object_for(key).presigned_url :get, expires_in: expires_in,
        response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
        response_content_type: content_type

      payload[:url] = generated_url

      generated_url
    end
  end
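
  # Returns a signed PUT URL that a client can use to upload a file for key
  # directly to S3, bypassing the application server.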
  def url_for_direct_upload(key, expires_in:, content_type:, content_length:)
    instrument :url, key do |payload|
      generated_url = object_for(key).presigned_url :put, expires_in: expires_in,
        content_type: content_type, content_length: content_length

      payload[:url] = generated_url

      generated_url
    end
  end

  private
    def object_for(key)
      bucket.object(key)
    end

    # Reads the object for the given key in chunks, yielding each to the block.
    def stream(key, options = {}, &block)
      object = object_for(key)

      chunk_size = 5.megabytes
      offset = 0

      while offset < object.content_length
        # Fetch a 5 MB slice at a time via a ranged GET and yield its body.
        yield object.get(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}")).body.read
        offset += chunk_size
      end
    end
end
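
# A minimal usage sketch. The credentials, region, bucket, and keys below are
# made-up example values, not part of this file:
#
#   service = ActiveStorage::Service::S3Service.new(
#     access_key_id: "AKIA...", secret_access_key: "...",
#     region: "us-east-1", bucket: "example-bucket"
#   )
#
#   service.upload("avatars/1", StringIO.new("hello"))
#   service.exist?("avatars/1")   # => true
#   service.download("avatars/1") # => "hello"
#   service.url("avatars/1", expires_in: 300, disposition: :inline,
#               filename: "hello.txt", content_type: "text/plain")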