# frozen_string_literal: true

gem "aws-sdk-s3", "~> 1.14"

require "aws-sdk-s3"
require "active_support/core_ext/numeric/bytes"

module ActiveStorage
  # Wraps the Amazon Simple Storage Service (S3) as an Active Storage service.
  # See ActiveStorage::Service for the generic API documentation that applies to all services.
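  #
  # A matching entry in +config/storage.yml+ might look like this (the bucket
  # name, region, and credential lookups below are illustrative, not required
  # values):
  #
  #   amazon:
  #     service: S3
  #     access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %>
  #     secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %>
  #     region: us-east-1
  #     bucket: your_own_bucket
  #     upload:
  #       multipart_threshold: <%= 100.megabytes %>
  #
  # +bucket+ and +upload+ are consumed here; every other option is passed
  # through to <tt>Aws::S3::Resource.new</tt>.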
  class Service::S3Service < Service
    attr_reader :client, :bucket
    attr_reader :multipart_upload_threshold, :upload_options

    def initialize(bucket:, upload: {}, **options)
      @client = Aws::S3::Resource.new(**options)
      @bucket = @client.bucket(bucket)

      @multipart_upload_threshold = upload.fetch(:multipart_threshold, 100.megabytes)
      @upload_options = upload
    end

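    # Uploads +io+ to +key+. Objects smaller than the multipart threshold
    # (100 MB unless configured otherwise) are written with a single PUT and
    # verified against the MD5 +checksum+; larger objects are streamed as a
    # multipart upload, which does not use the checksum.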
    def upload(key, io, checksum: nil, filename: nil, content_type: nil, disposition: nil, **)
      instrument :upload, key: key, checksum: checksum do
        content_disposition = content_disposition_with(filename: filename, type: disposition) if disposition && filename

        if io.size < multipart_upload_threshold
          upload_with_single_part key, io, checksum: checksum, content_type: content_type, content_disposition: content_disposition
        else
          upload_with_multipart key, io, content_type: content_type, content_disposition: content_disposition
        end
      end
    end

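    # With a block, streams the object at +key+ in 5 MB chunks, yielding each
    # chunk as a binary-encoded string. Without a block, returns the whole body
    # as a single binary-encoded string.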
    def download(key, &block)
      if block_given?
        instrument :streaming_download, key: key do
          stream(key, &block)
        end
      else
        instrument :download, key: key do
          object_for(key).get.body.string.force_encoding(Encoding::BINARY)
        rescue Aws::S3::Errors::NoSuchKey
          raise ActiveStorage::FileNotFoundError
        end
      end
    end

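    # Downloads only the given byte +range+ of the object, translating Ruby's
    # inclusive and exclusive ranges into an HTTP Range header.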
    def download_chunk(key, range)
      instrument :download_chunk, key: key, range: range do
        object_for(key).get(range: "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body.string.force_encoding(Encoding::BINARY)
      rescue Aws::S3::Errors::NoSuchKey
        raise ActiveStorage::FileNotFoundError
      end
    end

    def delete(key)
      instrument :delete, key: key do
        object_for(key).delete
      end
    end

    def delete_prefixed(prefix)
      instrument :delete_prefixed, prefix: prefix do
        bucket.objects(prefix: prefix).batch_delete!
      end
    end

    def exist?(key)
      instrument :exist, key: key do |payload|
        answer = object_for(key).exists?
        payload[:exist] = answer
        answer
      end
    end

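    # Returns a presigned GET URL for +key+ that expires after +expires_in+
    # seconds and instructs S3 to respond with the given Content-Disposition
    # and Content-Type.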
    def url(key, expires_in:, filename:, disposition:, content_type:)
      instrument :url, key: key do |payload|
        generated_url = object_for(key).presigned_url :get, expires_in: expires_in.to_i,
          response_content_disposition: content_disposition_with(type: disposition, filename: filename),
          response_content_type: content_type

        payload[:url] = generated_url

        generated_url
      end
    end

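    # Returns a presigned PUT URL that a client can use to upload the blob
    # directly to S3. The content type, length, and MD5 are part of the
    # signature, so the client must send matching headers (see
    # headers_for_direct_upload) with the upload request.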
    def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
      instrument :url, key: key do |payload|
        generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
          content_type: content_type, content_length: content_length, content_md5: checksum

        payload[:url] = generated_url

        generated_url
      end
    end

    def headers_for_direct_upload(key, content_type:, checksum:, **)
      { "Content-Type" => content_type, "Content-MD5" => checksum }
    end

    private
      MAXIMUM_UPLOAD_PARTS_COUNT = 10000
      MINIMUM_UPLOAD_PART_SIZE = 5.megabytes

      def upload_with_single_part(key, io, checksum: nil, content_type: nil, content_disposition: nil)
        object_for(key).put(body: io, content_md5: checksum, content_type: content_type, content_disposition: content_disposition, **upload_options)
      rescue Aws::S3::Errors::BadDigest
        raise ActiveStorage::IntegrityError
      end

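      # S3 multipart uploads allow at most 10,000 parts of at least 5 MB each
      # (the last part may be smaller), so the part size is the larger of the
      # 5 MB minimum and io.size / 10,000 rounded up. Objects up to roughly
      # 50 GB therefore use 5 MB parts; beyond that the part size grows to keep
      # the part count within the limit.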
      def upload_with_multipart(key, io, content_type: nil, content_disposition: nil)
        part_size = [ io.size.fdiv(MAXIMUM_UPLOAD_PARTS_COUNT).ceil, MINIMUM_UPLOAD_PART_SIZE ].max

        object_for(key).upload_stream(content_type: content_type, content_disposition: content_disposition, part_size: part_size, **upload_options) do |out|
          IO.copy_stream(io, out)
        end
      end

      def object_for(key)
        bucket.object(key)
      end

      # Reads the object for the given key in chunks, yielding each to the block.
      def stream(key)
        object = object_for(key)
        chunk_size = 5.megabytes
        offset = 0

        raise ActiveStorage::FileNotFoundError unless object.exists?

        while offset < object.content_length
          yield object.get(range: "bytes=#{offset}-#{offset + chunk_size - 1}").body.string.force_encoding(Encoding::BINARY)
          offset += chunk_size
        end
      end
  end
end