path: root/lib/active_storage/service/s3_service.rb
blob: c94f5ddc6384d84da243d8c0a6bc089b4653d25e
require "aws-sdk"

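# Wraps an Amazon S3 bucket as an Active Storage service, exposing upload,
# download (whole or streamed in chunks), deletion, existence checks, signed
# URLs, and basic metadata (byte size and checksum).
#
# A minimal construction sketch; the environment variable names and the region
# and bucket values below are illustrative assumptions, not taken from this file:
#
#   service = ActiveStorage::Service::S3Service.new(
#     access_key_id:     ENV["AWS_ACCESS_KEY_ID"],
#     secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
#     region:            "us-east-1",
#     bucket:            "example-uploads"
#   )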
class ActiveStorage::Service::S3Service < ActiveStorage::Service
  attr_reader :client, :bucket

  def initialize(access_key_id:, secret_access_key:, region:, bucket:)
    @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region)
    @bucket = @client.bucket(bucket)
  end

  def upload(key, io)
    object_for(key).put(body: io)
  end

  # Returns the object's full contents as a String, or yields them in chunks
  # when a block is given.
  def download(key, &block)
    if block_given?
      stream(key, &block)
    else
      object_for(key).get.body.read
    end
  end

  def delete(key)
    object_for(key).delete
  end

  def exist?(key)
    object_for(key).exists?
  end

  # Returns a signed GET URL for the object that expires after +expires_in+
  # seconds and sets the response's Content-Disposition from +disposition+
  # and +filename+.
  def url(key, expires_in:, disposition:, filename:)
    object_for(key).presigned_url :get, expires_in: expires_in,
      response_content_disposition: "#{disposition}; filename=\"#{filename}\""
  end

  def byte_size(key)
    object_for(key).size
  end

  # Returns the object's ETag with the surrounding quotes stripped; for
  # non-multipart uploads S3 sets this to the MD5 digest of the contents.
  def checksum(key)
    object_for(key).etag.remove(/"/)
  end

  private
    def object_for(key)
      bucket.object(key)
    end

    # Reads the object for the given key in chunks, yielding each to the block.
    def stream(key, options = {})
      object = object_for(key)

      chunk_size = 5.megabytes
      offset = 0

      while offset < object.content_length
        # Fetch each chunk with a ranged GET and yield its raw bytes to the block.
        yield object.get(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}")).body.read
        offset += chunk_size
      end
    end
end
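
# A hypothetical round trip, assuming a +service+ built as in the sketch above
# and a +key+ and +io+ supplied by the caller (all names illustrative):
#
#   service.upload(key, io)
#   service.exist?(key)   # => true
#   service.url(key, expires_in: 300, disposition: :inline, filename: "avatar.png")
#   service.download(key) { |chunk| destination.write(chunk) }   # 5 MB chunks
#   service.delete(key)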